Skip to content

Commit

Permalink
commented files and added some analysis
Browse files Browse the repository at this point in the history
  • Loading branch information
vibhaa committed Dec 31, 2015
1 parent dae7609 commit 77ae42c
Show file tree
Hide file tree
Showing 21 changed files with 120 additions and 16 deletions.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added Analysis/500 Flows Basic Dleft.xlsx
Binary file not shown.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added Analysis/Heuristic Comparison with Normal 1.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added Analysis/Heuristic Comparison with Normal 2.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added Analysis/Workspace.xlsx
Binary file not shown.
12 changes: 11 additions & 1 deletion code/AssymetricHashTableSimulation.java
Original file line number Diff line number Diff line change
@@ -1,5 +1,15 @@
import java.util.*;

/* hash table simulation to track the unique flows experiencing loss
using the D-Left hashing procedure where each flow id is hashed exactly
d times to generate d locations where the flow and its loss might be
stored
runs experiments on a certain number of flows and for a given table size
across multiple thresholds and averages out the results across a preset
number of trials and reports the results in a csv format
*/

public class AssymetricHashTableSimulation{
public static void main(String[] args){
final int numberOfTrials = 1000;
Expand Down Expand Up @@ -63,7 +73,7 @@ public static void main(String[] args){
double cumErrorMargin = 0;
int errorInBinaryAnswer = 0;
for (int i = 0; i < numberOfTrials; i++){
Collections.shuffle(packets);
//Collections.shuffle(packets); - don't shuffle for worst case scenario of all big losers being at the end
FlowWithCount.reset(buckets);
droppedPacketInfoCount = 0;
totalNumberOfPackets = 0; // needed for the denominator to compute the threshold for loss count
Expand Down
7 changes: 7 additions & 0 deletions code/CountMinLossyFlowIdentifier.java
Original file line number Diff line number Diff line change
@@ -1,5 +1,12 @@
import java.util.*;

/* incomplete code and hasn't been used */

/* Data Structure that identifies the big losers
by maintaining a count-min sketch and a heap that
is updated for every incoming packet by checking
if the flow it belongs to has exceeded the threshold*/

public class CountMinLossyFlowIdentifier{
public TreeMap<Long, Integer> heavyhitters;
public final Comparator lossyFlowComparator;
Expand Down
11 changes: 9 additions & 2 deletions code/EvictingHashTableSimulation.java
Original file line number Diff line number Diff line change
@@ -1,5 +1,12 @@
import java.util.*;

/* eviction heuristic (compare the incoming flow's loss against that of
the flow at the last of the d locations) added to the standard
hash table simulation to track the unique flows experiencing loss
using the D-Left hashing procedure where each flow id is hashed exactly
d times to generate d locations where the flow and its loss might be
stored*/

public class EvictingHashTableSimulation{
public static void main(String[] args){
int numberOfTrials = Integer.parseInt(args[0]);
Expand Down Expand Up @@ -97,8 +104,8 @@ public static void main(String[] args){
if (k == D) {
if (countMinSketch.estimateLossCount(buckets[index].flowid) < countMinSketch.estimateLossCount(packets.get(j))){
packetsInfoDroppedAtFlow[packets.get(j) - 1] = 0;
packetsInfoDroppedAtFlow[buckets[index].flowid - 1] = buckets[index].count;
droppedPacketInfoCount = droppedPacketInfoCount + buckets[index].count - (int) countMinSketch.estimateLossCount(packets.get(j));
packetsInfoDroppedAtFlow[buckets[index].flowid - 1] = (int) buckets[index].count;
droppedPacketInfoCount = droppedPacketInfoCount + (int) buckets[index].count - (int) countMinSketch.estimateLossCount(packets.get(j));
buckets[index].flowid = packets.get(j);
buckets[index].count = (int) countMinSketch.estimateLossCount(packets.get(j));
}
Expand Down
9 changes: 9 additions & 0 deletions code/EvictingMinFlowIdHashTableSimulation.java
Original file line number Diff line number Diff line change
@@ -1,5 +1,14 @@
import java.util.*;

/* eviction heuristic (compare the incoming flow's loss against that of
the flow with minimum loss amongst flows at all d locations)
added to the standard hash table simulation to track
the unique flows experiencing loss using the D-Left hashing procedure */

/* version of hash table simulation that uses the eviction heuristic
but only stores the flow id in the hash table
and uses the count-min sketch to estimate the loss count*/

public class EvictingMinFlowIdHashTableSimulation{
public static void main(String[] args){
int numberOfTrials = Integer.parseInt(args[0]);
Expand Down
19 changes: 13 additions & 6 deletions code/EvictingMinHashTableSimulation.java
Original file line number Diff line number Diff line change
@@ -1,5 +1,13 @@
import java.util.*;

/* version of hash table simulation that stores both flow id and count
in the hash table, but uses the count-min sketch to estimate the loss
for a flow that is not yet in the hash table and just came in; this is
used to perform the eviction heuristic */

/* NOTE: Accuracy isn't better than just maintaining the flow id in
the hash table - simulations are in Smart Eviction Simulation file*/

public class EvictingMinHashTableSimulation{
public static void main(String[] args){
int numberOfTrials = Integer.parseInt(args[0]);
Expand Down Expand Up @@ -75,7 +83,7 @@ public static void main(String[] args){
// keep track of which of the d locations has the minimum lost packet count
// use this location to place the incoming flow if there is a collision
int minIndex = 0;
int minValue = -1;
long minValue = -1;

for (k = 0; k < D; k++){
index = ((hashA[k]*packets.get(j) + hashB[k]) % P) % (tableSize/D) + (k*tableSize/D);
Expand Down Expand Up @@ -103,14 +111,13 @@ public static void main(String[] args){
// none of the D locations were free - hash collission
// decide if one of them is worth evicting

// TODO: figure out if the incoming flow has a higher loss than one of the existing flows in the table
// find a way of tracking the information of the incoming flow because it isnt the hash table
// so we don't have information on what its loss count is nd the very first time it comes in, loss is 0
// figure out if the incoming flow has a higher loss than the flow that has the minimum loss amongst
// the d existing flows in the table
if (k == D) {
if (countMinSketch.estimateLossCount(buckets[minIndex].flowid) < countMinSketch.estimateLossCount(packets.get(j))){
packetsInfoDroppedAtFlow[packets.get(j) - 1] = 0;
packetsInfoDroppedAtFlow[buckets[minIndex].flowid - 1] = buckets[minIndex].count;
droppedPacketInfoCount = droppedPacketInfoCount + buckets[minIndex].count - (int) countMinSketch.estimateLossCount(packets.get(j));
packetsInfoDroppedAtFlow[buckets[minIndex].flowid - 1] = (int) buckets[minIndex].count;
droppedPacketInfoCount = droppedPacketInfoCount + (int) buckets[minIndex].count - (int) countMinSketch.estimateLossCount(packets.get(j));
buckets[minIndex].flowid = packets.get(j);
buckets[minIndex].count = (int) countMinSketch.estimateLossCount(packets.get(j));
}
Expand Down
7 changes: 5 additions & 2 deletions code/FlowWithCount.java
Original file line number Diff line number Diff line change
@@ -1,8 +1,11 @@
/*Data Structure used as a building block for the hash table
that tracks the lossy packets*/

public class FlowWithCount{
int count;
long count;
int flowid;

public FlowWithCount(int flowid ,int count){
public FlowWithCount(int flowid ,long count){
this.count = count;
this.flowid = flowid;
}
Expand Down
6 changes: 6 additions & 0 deletions code/HashTableSimulation.java
Original file line number Diff line number Diff line change
@@ -1,5 +1,10 @@
import java.util.*;

/* hash table simulation to track the unique flows experiencing loss
using the standard technique where each flow id is hashed exactly
once to generate one location where the flow and its loss are
stored*/

public class HashTableSimulation{
public static void main(String[] args){
int numberOfTrials = Integer.parseInt(args[0]);
Expand All @@ -25,6 +30,7 @@ public static void main(String[] args){
buckets[j] = new FlowWithCount(0, 0);
}

// create a set of lost packets which consists of i lost packets of flow i
List<Integer> packets = new ArrayList<>();
// add i packets of flowid i
for (int i = 1; i <= numberOfFlows; i++)
Expand Down
3 changes: 3 additions & 0 deletions code/LossInducer.java
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
import java.util.*;

/* class that facilitates loss creation on a stream of packets
by choosing flow(s) to be lossy and returning all packets except
those belonging to the chosen flow(s)*/
public class LossInducer{
private int numberOfLossyFlows;
private int granularity;
Expand Down
7 changes: 7 additions & 0 deletions code/LossyFlowIdentifier.java
Original file line number Diff line number Diff line change
@@ -1,5 +1,12 @@
import java.util.*;

/* high level procedure that uses packet info from a csv file, parses it,
induces loss on it, and produces a sketch for the lost packet on which
the big loser identification process is performed
initially written for the sketches to all be reversible so that the
reversibility procedure would identify the lossy buckets - unused
code in the context of the hash table approach*/
public class LossyFlowIdentifier{
public static PriorityQueue<LossyFlow> HeapOfLossyFlows;
private class BucketMatrixIndex{
Expand Down
2 changes: 2 additions & 0 deletions code/Packet.java
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
import java.util.*;

/* abstraction for Packets in a network that helps retrieve
the key fields from the packet*/
public class Packet{
private long srcip;
private long dstip;
Expand Down
10 changes: 10 additions & 0 deletions code/Sketch.java
Original file line number Diff line number Diff line change
@@ -1,5 +1,15 @@
import java.*;

/* count min sketch that is used to maintain a summary data structure
to estimate the frequency of occurrence of certain items in a data
stream
used in this context to count the number of packets per flow that
have been lost
initially used to simulate reversible sketches too - hence, there
is some commented out code that belongs to that technique
*/
public class Sketch{
private int size; // K or the max number of buckets in a hash table
private int numberOfHashFunctions; // H
Expand Down
25 changes: 22 additions & 3 deletions code/SmartEvictionHashTableWithCountSimulation.java
Original file line number Diff line number Diff line change
@@ -1,5 +1,24 @@
import java.util.*;


/* eviction heuristic (compare the incoming flow's loss against that of
the flow with minimum loss amongst flows at all d locations)
added to the standard hash table simulation to track
the unique flows experiencing loss using the D-Left hashing procedure */

/* NOTE: Accuracy isn't better than just maintaining the flow id in
the hash table - simulations are in Smart Eviction Simulation file*/

/* version of hash table simulation that stores both flow id and count
in the hash table, but uses the count-min sketch to estimate the loss
for a flow that is not yet in the hash table and just came in; this is
used to perform the eviction heuristic
runs experiments on a certain number of flows and for a given table size
across multiple thresholds and averages out the results across a preset
number of trials and reports the results in a csv format
*/

public class SmartEvictionHashTableWithCountSimulation{
public static void main(String[] args){
final int numberOfTrials = 1000;
Expand Down Expand Up @@ -87,7 +106,7 @@ public static void main(String[] args){
// keep track of which of the d locations has the minimum lost packet count
// use this location to place the incoming flow if there is a collision
int minIndex = 0;
int minValue = -1;
long minValue = -1;

for (k = 0; k < D; k++){
int index = ((hashA[k]*packets.get(j) + hashB[k]) % P) % (tableSize[tableSize_index]/D) + (k*tableSize[tableSize_index]/D);
Expand Down Expand Up @@ -118,8 +137,8 @@ public static void main(String[] args){
if (k == D) {
if (countMinSketch.estimateLossCount(buckets[minIndex].flowid) < countMinSketch.estimateLossCount(packets.get(j))){
packetsInfoDroppedAtFlow[packets.get(j) - 1] = 0;
packetsInfoDroppedAtFlow[buckets[minIndex].flowid - 1] = buckets[minIndex].count;
droppedPacketInfoCount = droppedPacketInfoCount + buckets[minIndex].count - (int) countMinSketch.estimateLossCount(packets.get(j));
packetsInfoDroppedAtFlow[buckets[minIndex].flowid - 1] = (int) buckets[minIndex].count;
droppedPacketInfoCount = droppedPacketInfoCount + (int) buckets[minIndex].count - (int) countMinSketch.estimateLossCount(packets.get(j));
buckets[minIndex].flowid = packets.get(j);
buckets[minIndex].count = (int) countMinSketch.estimateLossCount(packets.get(j));
}
Expand Down
18 changes: 16 additions & 2 deletions code/SmartEvictionHashTableWithoutCountSimulation.java
Original file line number Diff line number Diff line change
@@ -1,5 +1,19 @@
import java.util.*;

/* eviction heuristic (compare the incoming flow's loss against that of
the flow with minimum loss amongst flows at all d locations)
added to the standard hash table simulation to track
the unique flows experiencing loss using the D-Left hashing procedure */

/* version of hash table simulation that uses the eviction heuristic
but only stores the flow id in the hash table
and uses the count-min sketch to estimate the loss count
runs experiments on a certain number of flows and for a given table size
across multiple thresholds and averages out the results across a preset
number of trials and reports the results in a csv format
*/

public class SmartEvictionHashTableWithoutCountSimulation{
public static void main(String[] args){
final int numberOfTrials = 1000;
Expand Down Expand Up @@ -30,7 +44,7 @@ public static void main(String[] args){


/*Sketch that maintains the loss of each flow* --- CHANGE THIS SIZE TOO */
Sketch countMinSketch = new Sketch(100, 3, numberOfFlows[flowSize_index]);
Sketch countMinSketch = new Sketch((int) tableSize[tableSize_index]/3, 3, numberOfFlows[flowSize_index]);

// create a set of lost packets which consists of i lost packets of flow i
ArrayList<Integer> packets = new ArrayList<Integer>();
Expand Down Expand Up @@ -61,7 +75,7 @@ public static void main(String[] args){
double cumErrorMargin = 0;
int errorInBinaryAnswer = 0;
for (int i = 0; i < numberOfTrials; i++){
Collections.shuffle(packets);
//Collections.shuffle(packets);
countMinSketch.reset();

//FlowWithCount.reset(buckets);
Expand Down

0 comments on commit 77ae42c

Please sign in to comment.