Merge fullstack_asterix_stabilization into the fullstack_hyracks_result_distribution branch.

git-svn-id: https://hyracks.googlecode.com/svn/branches/fullstack_hyracks_result_distribution@2862 123451ca-8445-de46-9d55-352943316053
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsReal.result b/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsReal.result
index b8efedc..45376e2 100644
--- a/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsReal.result
+++ b/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsReal.result
@@ -1,20 +1,20 @@
-0|Vertex(id=0,value=0, edges=(1,))
-1|Vertex(id=1,value=0, edges=(1,2,))
-2|Vertex(id=2,value=0, edges=(1,2,3,))
-3|Vertex(id=3,value=0, edges=(1,2,3,4,))
-4|Vertex(id=4,value=0, edges=(1,2,3,4,5,))
-5|Vertex(id=5,value=0, edges=(1,2,3,4,5,6,))
-6|Vertex(id=6,value=0, edges=(1,2,3,4,5,6,7,))
-7|Vertex(id=7,value=0, edges=(1,2,3,4,5,6,7,8,))
-8|Vertex(id=8,value=0, edges=(1,2,3,4,5,6,7,8,9,))
-9|Vertex(id=9,value=0, edges=(1,2,3,4,5,6,7,8,9,10,))
-10|Vertex(id=10,value=0, edges=(11,))
-11|Vertex(id=11,value=0, edges=(11,12,))
-12|Vertex(id=12,value=0, edges=(11,12,13,))
-13|Vertex(id=13,value=0, edges=(11,12,13,14,))
-14|Vertex(id=14,value=0, edges=(11,12,13,14,15,))
-15|Vertex(id=15,value=0, edges=(11,12,13,14,15,16,))
-16|Vertex(id=16,value=0, edges=(11,12,13,14,15,16,17,))
-17|Vertex(id=17,value=0, edges=(11,12,13,14,15,16,17,18,))
-18|Vertex(id=18,value=0, edges=(11,12,13,14,15,16,17,18,19,))
-19|Vertex(id=19,value=0, edges=(0,11,12,13,14,15,16,17,18,19,))
+0 0
+1 0
+2 0
+3 0
+4 0
+5 0
+6 0
+7 0
+8 0
+9 0
+10 0
+11 0
+12 0
+13 0
+14 0
+15 0
+16 0
+17 0
+18 0
+19 0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsRealComplex.result b/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsRealComplex.result
index ad448b2..dbc30fc 100644
--- a/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsRealComplex.result
+++ b/pregelix/pregelix-example/src/test/resources/expected/ConnectedComponentsRealComplex.result
@@ -1,23 +1,23 @@
-0|Vertex(id=0,value=0, edges=(1,50,))
-1|Vertex(id=1,value=0, edges=(1,2,))
-2|Vertex(id=2,value=0, edges=(1,2,3,))
-3|Vertex(id=3,value=0, edges=(1,2,3,4,))
-4|Vertex(id=4,value=0, edges=(1,2,3,4,5,))
-5|Vertex(id=5,value=0, edges=(1,2,3,4,5,6,))
-6|Vertex(id=6,value=0, edges=(1,2,3,4,5,6,7,))
-7|Vertex(id=7,value=0, edges=(1,2,3,4,5,6,7,8,))
-8|Vertex(id=8,value=0, edges=(1,2,3,4,5,6,7,8,9,))
-9|Vertex(id=9,value=0, edges=(1,2,3,4,5,6,7,8,9,10,))
-10|Vertex(id=10,value=0, edges=(11,99,))
-11|Vertex(id=11,value=0, edges=(11,12,101,))
-12|Vertex(id=12,value=0, edges=(11,12,13,))
-13|Vertex(id=13,value=0, edges=(11,12,13,14,))
-14|Vertex(id=14,value=0, edges=(11,12,13,14,15,))
-15|Vertex(id=15,value=0, edges=(11,12,13,14,15,16,))
-16|Vertex(id=16,value=0, edges=(11,12,13,14,15,16,17,))
-17|Vertex(id=17,value=0, edges=(11,12,13,14,15,16,17,18,))
-18|Vertex(id=18,value=0, edges=(11,12,13,14,15,16,17,18,19,))
-19|Vertex(id=19,value=0, edges=(0,11,12,13,14,15,16,17,18,19,))
-21|Vertex(id=21,value=21, edges=(22,23,24,))
-25|Vertex(id=25,value=25, edges=())
-27|Vertex(id=27,value=27, edges=())
+0 0
+1 0
+2 0
+3 0
+4 0
+5 0
+6 0
+7 0
+8 0
+9 0
+10 0
+11 0
+12 0
+13 0
+14 0
+15 0
+16 0
+17 0
+18 0
+19 0
+21 21
+25 25
+27 27
diff --git a/pregelix/pregelix-example/src/test/resources/expected/MaximalClique.result b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique.result
new file mode 100644
index 0000000..d238037
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/MaximalClique.result
@@ -0,0 +1,7 @@
+1 1,2,3,4;
+2 2,3,4;
+3 
+4 
+5 
+6 
+7 
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRank.result b/pregelix/pregelix-example/src/test/resources/expected/PageRank.result
index f38e191..9c4d83a 100644
--- a/pregelix/pregelix-example/src/test/resources/expected/PageRank.result
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRank.result
@@ -1,20 +1,20 @@
-0|Vertex(id=0,value=0.008290140026154316, edges=(1,))
-1|Vertex(id=1,value=0.1535152819247165, edges=(1,2,))
-2|Vertex(id=2,value=0.14646839195826475, edges=(1,2,3,))
-3|Vertex(id=3,value=0.08125113985998214, edges=(1,2,3,4,))
-4|Vertex(id=4,value=0.03976979906329426, edges=(1,2,3,4,5,))
-5|Vertex(id=5,value=0.0225041581462058, edges=(1,2,3,4,5,6,))
-6|Vertex(id=6,value=0.015736276824953852, edges=(1,2,3,4,5,6,7,))
-7|Vertex(id=7,value=0.012542224114863661, edges=(1,2,3,4,5,6,7,8,))
-8|Vertex(id=8,value=0.010628239626209894, edges=(1,2,3,4,5,6,7,8,9,))
-9|Vertex(id=9,value=0.009294348455354817, edges=(1,2,3,4,5,6,7,8,9,10,))
-10|Vertex(id=10,value=0.008290140026154316, edges=(11,))
-11|Vertex(id=11,value=0.15351528192471647, edges=(11,12,))
-12|Vertex(id=12,value=0.14646839195826472, edges=(11,12,13,))
-13|Vertex(id=13,value=0.08125113985998214, edges=(11,12,13,14,))
-14|Vertex(id=14,value=0.03976979906329425, edges=(11,12,13,14,15,))
-15|Vertex(id=15,value=0.0225041581462058, edges=(11,12,13,14,15,16,))
-16|Vertex(id=16,value=0.015736276824953852, edges=(11,12,13,14,15,16,17,))
-17|Vertex(id=17,value=0.012542224114863661, edges=(11,12,13,14,15,16,17,18,))
-18|Vertex(id=18,value=0.010628239626209894, edges=(11,12,13,14,15,16,17,18,19,))
-19|Vertex(id=19,value=0.009294348455354817, edges=(0,11,12,13,14,15,16,17,18,19,))
+0 0.008290140026154316
+1 0.1535152819247165
+2 0.14646839195826475
+3 0.08125113985998214
+4 0.03976979906329426
+5 0.0225041581462058
+6 0.015736276824953852
+7 0.012542224114863661
+8 0.010628239626209894
+9 0.009294348455354817
+10 0.008290140026154316
+11 0.15351528192471647
+12 0.14646839195826472
+13 0.08125113985998214
+14 0.03976979906329425
+15 0.0225041581462058
+16 0.015736276824953852
+17 0.012542224114863661
+18 0.010628239626209894
+19 0.009294348455354817
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankReal.result b/pregelix/pregelix-example/src/test/resources/expected/PageRankReal.result
index ab05d38..6432eda 100644
--- a/pregelix/pregelix-example/src/test/resources/expected/PageRankReal.result
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRankReal.result
@@ -1,20 +1,20 @@
-0|Vertex(id=0,value=0.008290140026154316, edges=(1,))
-1|Vertex(id=1,value=0.1535152819247165, edges=(1,2,))
-2|Vertex(id=2,value=0.14646839195826475, edges=(1,2,3,))
-3|Vertex(id=3,value=0.08125113985998214, edges=(1,2,3,4,))
-4|Vertex(id=4,value=0.03976979906329426, edges=(1,2,3,4,5,))
-5|Vertex(id=5,value=0.0225041581462058, edges=(1,2,3,4,5,6,))
-6|Vertex(id=6,value=0.015736276824953852, edges=(1,2,3,4,5,6,7,))
-7|Vertex(id=7,value=0.012542224114863661, edges=(1,2,3,4,5,6,7,8,))
-8|Vertex(id=8,value=0.010628239626209894, edges=(1,2,3,4,5,6,7,8,9,))
-9|Vertex(id=9,value=0.009294348455354817, edges=(1,2,3,4,5,6,7,8,9,10,))
-10|Vertex(id=10,value=0.008290140026154316, edges=(11,))
-11|Vertex(id=11,value=0.15351528192471647, edges=(11,12,))
-12|Vertex(id=12,value=0.14646839195826472, edges=(11,12,13,))
-13|Vertex(id=13,value=0.08125113985998214, edges=(11,12,13,14,))
-14|Vertex(id=14,value=0.03976979906329426, edges=(11,12,13,14,15,))
-15|Vertex(id=15,value=0.0225041581462058, edges=(11,12,13,14,15,16,))
-16|Vertex(id=16,value=0.015736276824953852, edges=(11,12,13,14,15,16,17,))
-17|Vertex(id=17,value=0.012542224114863661, edges=(11,12,13,14,15,16,17,18,))
-18|Vertex(id=18,value=0.010628239626209894, edges=(11,12,13,14,15,16,17,18,19,))
-19|Vertex(id=19,value=0.009294348455354817, edges=(0,11,12,13,14,15,16,17,18,19,))
+0 0.008290140026154316
+1 0.1535152819247165
+2 0.14646839195826475
+3 0.08125113985998214
+4 0.03976979906329426
+5 0.0225041581462058
+6 0.015736276824953852
+7 0.012542224114863661
+8 0.010628239626209894
+9 0.009294348455354817
+10 0.008290140026154316
+11 0.15351528192471647
+12 0.14646839195826472
+13 0.08125113985998214
+14 0.03976979906329426
+15 0.0225041581462058
+16 0.015736276824953852
+17 0.012542224114863661
+18 0.010628239626209894
+19 0.009294348455354817
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealComplex.result b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealComplex.result
index 1fc108a..2bd09e1 100644
--- a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealComplex.result
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealComplex.result
@@ -1,23 +1,23 @@
-0|Vertex(id=0,value=0.0072088164890121405, edges=(1,50,))
-1|Vertex(id=1,value=0.12352056961948686, edges=(1,2,))
-2|Vertex(id=2,value=0.12045670441668178, edges=(1,2,3,))
-3|Vertex(id=3,value=0.06798545786459467, edges=(1,2,3,4,))
-4|Vertex(id=4,value=0.03387281259892814, edges=(1,2,3,4,5,))
-5|Vertex(id=5,value=0.01942600635480669, edges=(1,2,3,4,5,6,))
-6|Vertex(id=6,value=0.013661020012182747, edges=(1,2,3,4,5,6,7,))
-7|Vertex(id=7,value=0.0109034351563503, edges=(1,2,3,4,5,6,7,8,))
-8|Vertex(id=8,value=0.009241684574402657, edges=(1,2,3,4,5,6,7,8,9,))
-9|Vertex(id=9,value=0.008082028259564783, edges=(1,2,3,4,5,6,7,8,9,10,))
-10|Vertex(id=10,value=0.007208817414047232, edges=(11,99,))
-11|Vertex(id=11,value=0.07555839219845861, edges=(11,12,101,))
-12|Vertex(id=12,value=0.07249452699565352, edges=(11,12,13,))
-13|Vertex(id=13,value=0.05063539695954156, edges=(11,12,13,14,))
-14|Vertex(id=14,value=0.029644452692487822, edges=(11,12,13,14,15,))
-15|Vertex(id=15,value=0.018670183493927354, edges=(11,12,13,14,15,16,))
-16|Vertex(id=16,value=0.013558283213067561, edges=(11,12,13,14,15,16,17,))
-17|Vertex(id=17,value=0.010892790899883237, edges=(11,12,13,14,15,16,17,18,))
-18|Vertex(id=18,value=0.009240874593661061, edges=(11,12,13,14,15,16,17,18,19,))
-19|Vertex(id=19,value=0.008081987856433137, edges=(0,11,12,13,14,15,16,17,18,19,))
-21|Vertex(id=21,value=0.006521739130434782, edges=(22,23,24,))
-25|Vertex(id=25,value=0.006521739130434782, edges=())
-27|Vertex(id=27,value=0.006521739130434782, edges=())
+0 0.0072088164890121405
+1 0.12352056961948686
+2 0.12045670441668178
+3 0.06798545786459467
+4 0.03387281259892814
+5 0.01942600635480669
+6 0.013661020012182747
+7 0.0109034351563503
+8 0.009241684574402657
+9 0.008082028259564783
+10 0.007208817414047232
+11 0.07555839219845861
+12 0.07249452699565352
+13 0.05063539695954156
+14 0.029644452692487822
+15 0.018670183493927354
+16 0.013558283213067561
+17 0.010892790899883237
+18 0.009240874593661061
+19 0.008081987856433137
+21 0.006521739130434782
+25 0.006521739130434782
+27 0.006521739130434782
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealDynamic.result b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealDynamic.result
new file mode 100644
index 0000000..6432eda
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealDynamic.result
@@ -0,0 +1,20 @@
+0 0.008290140026154316
+1 0.1535152819247165
+2 0.14646839195826475
+3 0.08125113985998214
+4 0.03976979906329426
+5 0.0225041581462058
+6 0.015736276824953852
+7 0.012542224114863661
+8 0.010628239626209894
+9 0.009294348455354817
+10 0.008290140026154316
+11 0.15351528192471647
+12 0.14646839195826472
+13 0.08125113985998214
+14 0.03976979906329426
+15 0.0225041581462058
+16 0.015736276824953852
+17 0.012542224114863661
+18 0.010628239626209894
+19 0.009294348455354817
diff --git a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealNoCombiner.result b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealNoCombiner.result
index ab05d38..9a747a6 100755
--- a/pregelix/pregelix-example/src/test/resources/expected/PageRankRealNoCombiner.result
+++ b/pregelix/pregelix-example/src/test/resources/expected/PageRankRealNoCombiner.result
@@ -1,20 +1,20 @@
-0|Vertex(id=0,value=0.008290140026154316, edges=(1,))
-1|Vertex(id=1,value=0.1535152819247165, edges=(1,2,))
-2|Vertex(id=2,value=0.14646839195826475, edges=(1,2,3,))
-3|Vertex(id=3,value=0.08125113985998214, edges=(1,2,3,4,))
-4|Vertex(id=4,value=0.03976979906329426, edges=(1,2,3,4,5,))
-5|Vertex(id=5,value=0.0225041581462058, edges=(1,2,3,4,5,6,))
-6|Vertex(id=6,value=0.015736276824953852, edges=(1,2,3,4,5,6,7,))
-7|Vertex(id=7,value=0.012542224114863661, edges=(1,2,3,4,5,6,7,8,))
-8|Vertex(id=8,value=0.010628239626209894, edges=(1,2,3,4,5,6,7,8,9,))
-9|Vertex(id=9,value=0.009294348455354817, edges=(1,2,3,4,5,6,7,8,9,10,))
-10|Vertex(id=10,value=0.008290140026154316, edges=(11,))
-11|Vertex(id=11,value=0.15351528192471647, edges=(11,12,))
-12|Vertex(id=12,value=0.14646839195826472, edges=(11,12,13,))
-13|Vertex(id=13,value=0.08125113985998214, edges=(11,12,13,14,))
-14|Vertex(id=14,value=0.03976979906329426, edges=(11,12,13,14,15,))
-15|Vertex(id=15,value=0.0225041581462058, edges=(11,12,13,14,15,16,))
-16|Vertex(id=16,value=0.015736276824953852, edges=(11,12,13,14,15,16,17,))
-17|Vertex(id=17,value=0.012542224114863661, edges=(11,12,13,14,15,16,17,18,))
-18|Vertex(id=18,value=0.010628239626209894, edges=(11,12,13,14,15,16,17,18,19,))
-19|Vertex(id=19,value=0.009294348455354817, edges=(0,11,12,13,14,15,16,17,18,19,))
+0 0.008290140026154316
+1 0.15351528192471647
+2 0.14646839195826475
+3 0.08125113985998211
+4 0.03976979906329425
+5 0.0225041581462058
+6 0.01573627682495385
+7 0.012542224114863661
+8 0.010628239626209894
+9 0.009294348455354817
+10 0.008290140026154316
+11 0.1535152819247165
+12 0.14646839195826475
+13 0.08125113985998214
+14 0.03976979906329426
+15 0.0225041581462058
+16 0.015736276824953852
+17 0.012542224114863661
+18 0.010628239626209894
+19 0.009294348455354817
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplex.result b/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplex.result
index 74113a8..a1dfc0f 100644
--- a/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplex.result
+++ b/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplex.result
@@ -1,23 +1,23 @@
-0|Vertex(id=0,value=2, edges=(1,50,))
-1|Vertex(id=1,value=3, edges=(1,2,))
-2|Vertex(id=2,value=1, edges=(1,2,3,))
-3|Vertex(id=3,value=1, edges=(1,2,3,4,))
-4|Vertex(id=4,value=1, edges=(1,2,3,4,5,))
-5|Vertex(id=5,value=1, edges=(1,2,3,4,5,6,))
-6|Vertex(id=6,value=1, edges=(1,2,3,4,5,6,7,))
-7|Vertex(id=7,value=1, edges=(1,2,3,4,5,6,7,8,))
-8|Vertex(id=8,value=1, edges=(1,2,3,4,5,6,7,8,9,))
-9|Vertex(id=9,value=1, edges=(1,2,3,4,5,6,7,8,9,10,))
-10|Vertex(id=10,value=3, edges=(11,99,))
-11|Vertex(id=11,value=2, edges=(11,12,101,))
-12|Vertex(id=12,value=2, edges=(11,12,13,))
-13|Vertex(id=13,value=2, edges=(11,12,13,14,))
-14|Vertex(id=14,value=2, edges=(11,12,13,14,15,))
-15|Vertex(id=15,value=2, edges=(11,12,13,14,15,16,))
-16|Vertex(id=16,value=2, edges=(11,12,13,14,15,16,17,))
-17|Vertex(id=17,value=2, edges=(11,12,13,14,15,16,17,18,))
-18|Vertex(id=18,value=2, edges=(11,12,13,14,15,16,17,18,19,))
-19|Vertex(id=19,value=2, edges=(0,11,12,13,14,15,16,17,18,19,))
-21|Vertex(id=21,value=0, edges=(22,23,24,))
-25|Vertex(id=25,value=0, edges=())
-27|Vertex(id=27,value=0, edges=())
+0 2
+1 3
+2 1
+3 1
+4 1
+5 1
+6 1
+7 1
+8 1
+9 1
+10 3
+11 2
+12 2
+13 2
+14 2
+15 2
+16 2
+17 2
+18 2
+19 2
+21 0
+25 0
+27 0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplexNoConnectivity.result b/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplexNoConnectivity.result
index ea0edc2..1693fb2 100644
--- a/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplexNoConnectivity.result
+++ b/pregelix/pregelix-example/src/test/resources/expected/ReachibilityRealComplexNoConnectivity.result
@@ -1,23 +1,23 @@
-0|Vertex(id=0,value=1, edges=(1,50,))
-1|Vertex(id=1,value=1, edges=(1,2,))
-2|Vertex(id=2,value=1, edges=(1,2,3,))
-3|Vertex(id=3,value=1, edges=(1,2,3,4,))
-4|Vertex(id=4,value=1, edges=(1,2,3,4,5,))
-5|Vertex(id=5,value=1, edges=(1,2,3,4,5,6,))
-6|Vertex(id=6,value=1, edges=(1,2,3,4,5,6,7,))
-7|Vertex(id=7,value=1, edges=(1,2,3,4,5,6,7,8,))
-8|Vertex(id=8,value=1, edges=(1,2,3,4,5,6,7,8,9,))
-9|Vertex(id=9,value=1, edges=(1,2,3,4,5,6,7,8,9,10,))
-10|Vertex(id=10,value=1, edges=(11,99,))
-11|Vertex(id=11,value=1, edges=(11,12,101,))
-12|Vertex(id=12,value=1, edges=(11,12,13,))
-13|Vertex(id=13,value=1, edges=(11,12,13,14,))
-14|Vertex(id=14,value=1, edges=(11,12,13,14,15,))
-15|Vertex(id=15,value=1, edges=(11,12,13,14,15,16,))
-16|Vertex(id=16,value=1, edges=(11,12,13,14,15,16,17,))
-17|Vertex(id=17,value=1, edges=(11,12,13,14,15,16,17,18,))
-18|Vertex(id=18,value=1, edges=(11,12,13,14,15,16,17,18,19,))
-19|Vertex(id=19,value=1, edges=(0,11,12,13,14,15,16,17,18,19,))
-21|Vertex(id=21,value=0, edges=(22,23,24,))
-25|Vertex(id=25,value=2, edges=())
-27|Vertex(id=27,value=0, edges=())
+0 1
+1 1
+2 1
+3 1
+4 1
+5 1
+6 1
+7 1
+8 1
+9 1
+10 1
+11 1
+12 1
+13 1
+14 1
+15 1
+16 1
+17 1
+18 1
+19 1
+21 0
+25 2
+27 0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ShortestPaths.result b/pregelix/pregelix-example/src/test/resources/expected/ShortestPaths.result
index 7bb0ca3..46d1c73 100644
--- a/pregelix/pregelix-example/src/test/resources/expected/ShortestPaths.result
+++ b/pregelix/pregelix-example/src/test/resources/expected/ShortestPaths.result
@@ -1,20 +1,20 @@
-0|Vertex(id=0,value=0.0, edges=(1,))
-1|Vertex(id=1,value=0.0, edges=(1,2,))
-2|Vertex(id=2,value=100.0, edges=(1,2,3,))
-3|Vertex(id=3,value=300.0, edges=(1,2,3,4,))
-4|Vertex(id=4,value=600.0, edges=(1,2,3,4,5,))
-5|Vertex(id=5,value=1000.0, edges=(1,2,3,4,5,6,))
-6|Vertex(id=6,value=1500.0, edges=(1,2,3,4,5,6,7,))
-7|Vertex(id=7,value=2100.0, edges=(1,2,3,4,5,6,7,8,))
-8|Vertex(id=8,value=2800.0, edges=(1,2,3,4,5,6,7,8,9,))
-9|Vertex(id=9,value=3600.0, edges=(1,2,3,4,5,6,7,8,9,10,))
-10|Vertex(id=10,value=4500.0, edges=(11,))
-11|Vertex(id=11,value=5500.0, edges=(11,12,))
-12|Vertex(id=12,value=6600.0, edges=(11,12,13,))
-13|Vertex(id=13,value=7800.0, edges=(11,12,13,14,))
-14|Vertex(id=14,value=9100.0, edges=(11,12,13,14,15,))
-15|Vertex(id=15,value=10500.0, edges=(11,12,13,14,15,16,))
-16|Vertex(id=16,value=12000.0, edges=(11,12,13,14,15,16,17,))
-17|Vertex(id=17,value=13600.0, edges=(11,12,13,14,15,16,17,18,))
-18|Vertex(id=18,value=15300.0, edges=(11,12,13,14,15,16,17,18,19,))
-19|Vertex(id=19,value=17100.0, edges=(0,11,12,13,14,15,16,17,18,19,))
+0 0.0
+1 0.0
+2 100.0
+3 300.0
+4 600.0
+5 1000.0
+6 1500.0
+7 2100.0
+8 2800.0
+9 3600.0
+10 4500.0
+11 5500.0
+12 6600.0
+13 7800.0
+14 9100.0
+15 10500.0
+16 12000.0
+17 13600.0
+18 15300.0
+19 17100.0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/ShortestPathsReal.result b/pregelix/pregelix-example/src/test/resources/expected/ShortestPathsReal.result
index f2c31a6..b42462f 100644
--- a/pregelix/pregelix-example/src/test/resources/expected/ShortestPathsReal.result
+++ b/pregelix/pregelix-example/src/test/resources/expected/ShortestPathsReal.result
@@ -1,20 +1,20 @@
-0|Vertex(id=0,value=0.0, edges=(1,))
-1|Vertex(id=1,value=1.0, edges=(1,2,))
-2|Vertex(id=2,value=2.0, edges=(1,2,3,))
-3|Vertex(id=3,value=3.0, edges=(1,2,3,4,))
-4|Vertex(id=4,value=4.0, edges=(1,2,3,4,5,))
-5|Vertex(id=5,value=5.0, edges=(1,2,3,4,5,6,))
-6|Vertex(id=6,value=6.0, edges=(1,2,3,4,5,6,7,))
-7|Vertex(id=7,value=7.0, edges=(1,2,3,4,5,6,7,8,))
-8|Vertex(id=8,value=8.0, edges=(1,2,3,4,5,6,7,8,9,))
-9|Vertex(id=9,value=9.0, edges=(1,2,3,4,5,6,7,8,9,10,))
-10|Vertex(id=10,value=10.0, edges=(11,))
-11|Vertex(id=11,value=11.0, edges=(11,12,))
-12|Vertex(id=12,value=12.0, edges=(11,12,13,))
-13|Vertex(id=13,value=13.0, edges=(11,12,13,14,))
-14|Vertex(id=14,value=14.0, edges=(11,12,13,14,15,))
-15|Vertex(id=15,value=15.0, edges=(11,12,13,14,15,16,))
-16|Vertex(id=16,value=16.0, edges=(11,12,13,14,15,16,17,))
-17|Vertex(id=17,value=17.0, edges=(11,12,13,14,15,16,17,18,))
-18|Vertex(id=18,value=18.0, edges=(11,12,13,14,15,16,17,18,19,))
-19|Vertex(id=19,value=19.0, edges=(0,11,12,13,14,15,16,17,18,19,))
+0 0.0
+1 1.0
+2 2.0
+3 3.0
+4 4.0
+5 5.0
+6 6.0
+7 7.0
+8 8.0
+9 9.0
+10 10.0
+11 11.0
+12 12.0
+13 13.0
+14 14.0
+15 15.0
+16 16.0
+17 17.0
+18 18.0
+19 19.0
diff --git a/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting.result b/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting.result
new file mode 100644
index 0000000..4818e13
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/expected/TriangleCounting.result
@@ -0,0 +1,7 @@
+1 3
+2 2
+3 0
+4 0
+5 1
+6 0
+7 0
diff --git a/pregelix/pregelix-example/src/test/resources/hadoop/conf/mapred-site.xml b/pregelix/pregelix-example/src/test/resources/hadoop/conf/mapred-site.xml
index 1b9a4d6..71450f1 100644
--- a/pregelix/pregelix-example/src/test/resources/hadoop/conf/mapred-site.xml
+++ b/pregelix/pregelix-example/src/test/resources/hadoop/conf/mapred-site.xml
@@ -18,8 +18,8 @@
       <value>20</value>
    </property>
    <property>
-      <name>mapred.min.split.size</name>
-      <value>65536</value>
+      <name>mapred.max.split.size</name>
+      <value>128</value>
    </property>
 
 </configuration>
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/MaximalClique.xml b/pregelix/pregelix-example/src/test/resources/jobs/MaximalClique.xml
new file mode 100644
index 0000000..616c647
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/jobs/MaximalClique.xml
@@ -0,0 +1,142 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?><configuration>
+<property><name>mapred.tasktracker.dns.nameserver</name><value>default</value></property>
+<property><name>mapred.queue.default.acl-administer-jobs</name><value>*</value></property>
+<property><name>mapred.skip.map.auto.incr.proc.count</name><value>true</value></property>
+<property><name>mapred.jobtracker.instrumentation</name><value>org.apache.hadoop.mapred.JobTrackerMetricsInst</value></property>
+<property><name>mapred.skip.reduce.auto.incr.proc.count</name><value>true</value></property>
+<property><name>fs.hsftp.impl</name><value>org.apache.hadoop.hdfs.HsftpFileSystem</value></property>
+<property><name>mapred.input.dir</name><value>file:/clique</value></property>
+<property><name>mapred.submit.replication</name><value>10</value></property>
+<property><name>ipc.server.tcpnodelay</name><value>false</value></property>
+<property><name>fs.checkpoint.dir</name><value>${hadoop.tmp.dir}/dfs/namesecondary</value></property>
+<property><name>mapred.output.compression.type</name><value>RECORD</value></property>
+<property><name>mapred.job.shuffle.merge.percent</name><value>0.66</value></property>
+<property><name>mapred.child.java.opts</name><value>-Xmx200m</value></property>
+<property><name>mapred.queue.default.acl-submit-job</name><value>*</value></property>
+<property><name>keep.failed.task.files</name><value>false</value></property>
+<property><name>mapred.jobtracker.job.history.block.size</name><value>3145728</value></property>
+<property><name>io.bytes.per.checksum</name><value>512</value></property>
+<property><name>mapred.task.tracker.report.address</name><value>127.0.0.1:0</value></property>
+<property><name>hadoop.util.hash.type</name><value>murmur</value></property>
+<property><name>fs.hdfs.impl</name><value>org.apache.hadoop.hdfs.DistributedFileSystem</value></property>
+<property><name>fs.ramfs.impl</name><value>org.apache.hadoop.fs.InMemoryFileSystem</value></property>
+<property><name>mapred.jobtracker.restart.recover</name><value>false</value></property>
+<property><name>fs.hftp.impl</name><value>org.apache.hadoop.hdfs.HftpFileSystem</value></property>
+<property><name>fs.checkpoint.period</name><value>3600</value></property>
+<property><name>mapred.child.tmp</name><value>./tmp</value></property>
+<property><name>mapred.local.dir.minspacekill</name><value>0</value></property>
+<property><name>map.sort.class</name><value>org.apache.hadoop.util.QuickSort</value></property>
+<property><name>hadoop.logfile.count</name><value>10</value></property>
+<property><name>ipc.client.connection.maxidletime</name><value>10000</value></property>
+<property><name>mapred.output.dir</name><value>/resultclique</value></property>
+<property><name>io.map.index.skip</name><value>0</value></property>
+<property><name>mapred.tasktracker.expiry.interval</name><value>600000</value></property>
+<property><name>mapred.output.compress</name><value>false</value></property>
+<property><name>io.seqfile.lazydecompress</name><value>true</value></property>
+<property><name>mapred.reduce.parallel.copies</name><value>5</value></property>
+<property><name>fs.checkpoint.size</name><value>67108864</value></property>
+<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
+<property><name>mapred.job.name</name><value>Maximal Clique</value></property>
+<property><name>local.cache.size</name><value>10737418240</value></property>
+<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
+<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
+<property><name>fs.file.impl</name><value>org.apache.hadoop.fs.LocalFileSystem</value></property>
+<property><name>mapred.task.tracker.http.address</name><value>0.0.0.0:50060</value></property>
+<property><name>mapred.task.timeout</name><value>600000</value></property>
+<property><name>fs.kfs.impl</name><value>org.apache.hadoop.fs.kfs.KosmosFileSystem</value></property>
+<property><name>mapred.max.tracker.blacklists</name><value>4</value></property>
+<property><name>fs.s3.buffer.dir</name><value>${hadoop.tmp.dir}/s3</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.dir</name><value>/jobtracker/jobsInfo</value></property>
+<property><name>ipc.client.kill.max</name><value>10</value></property>
+<property><name>mapred.tasktracker.instrumentation</name><value>org.apache.hadoop.mapred.TaskTrackerMetricsInst</value></property>
+<property><name>mapred.reduce.tasks.speculative.execution</name><value>true</value></property>
+<property><name>io.sort.record.percent</name><value>0.05</value></property>
+<property><name>hadoop.security.authorization</name><value>false</value></property>
+<property><name>mapred.max.tracker.failures</name><value>4</value></property>
+<property><name>mapred.jobtracker.taskScheduler</name><value>org.apache.hadoop.mapred.JobQueueTaskScheduler</value></property>
+<property><name>mapred.tasktracker.dns.interface</name><value>default</value></property>
+<property><name>mapred.map.tasks</name><value>2</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.hours</name><value>0</value></property>
+<property><name>fs.s3.sleepTimeSeconds</name><value>10</value></property>
+<property><name>fs.default.name</name><value>file:///</value></property>
+<property><name>tasktracker.http.threads</name><value>40</value></property>
+<property><name>mapred.tasktracker.taskmemorymanager.monitoring-interval</name><value>5000</value></property>
+<property><name>hadoop.rpc.socket.factory.class.default</name><value>org.apache.hadoop.net.StandardSocketFactory</value></property>
+<property><name>mapred.reduce.tasks</name><value>1</value></property>
+<property><name>topology.node.switch.mapping.impl</name><value>org.apache.hadoop.net.ScriptBasedMapping</value></property>
+<property><name>pregelix.vertexClass</name><value>edu.uci.ics.pregelix.example.maximalclique.MaximalCliqueVertex</value></property>
+<property><name>mapred.skip.reduce.max.skip.groups</name><value>0</value></property>
+<property><name>io.file.buffer.size</name><value>4096</value></property>
+<property><name>mapred.jobtracker.maxtasks.per.job</name><value>-1</value></property>
+<property><name>mapred.tasktracker.indexcache.mb</name><value>10</value></property>
+<property><name>mapred.tasktracker.map.tasks.maximum</name><value>2</value></property>
+<property><name>fs.har.impl.disable.cache</name><value>true</value></property>
+<property><name>mapred.task.profile.maps</name><value>0-2</value></property>
+<property><name>hadoop.native.lib</name><value>true</value></property>
+<property><name>fs.s3.block.size</name><value>67108864</value></property>
+<property><name>mapred.job.reuse.jvm.num.tasks</name><value>1</value></property>
+<property><name>mapred.job.tracker.http.address</name><value>0.0.0.0:50030</value></property>
+<property><name>mapred.tasktracker.reduce.tasks.maximum</name><value>2</value></property>
+<property><name>io.compression.codecs</name><value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec</value></property>
+<property><name>mapred.job.shuffle.input.buffer.percent</name><value>0.70</value></property>
+<property><name>io.seqfile.compress.blocksize</name><value>1000000</value></property>
+<property><name>mapred.queue.names</name><value>default</value></property>
+<property><name>fs.har.impl</name><value>org.apache.hadoop.fs.HarFileSystem</value></property>
+<property><name>io.mapfile.bloom.error.rate</name><value>0.005</value></property>
+<property><name>mapred.job.tracker</name><value>local</value></property>
+<property><name>io.skip.checksum.errors</name><value>false</value></property>
+<property><name>mapred.reduce.max.attempts</name><value>4</value></property>
+<property><name>fs.s3.maxRetries</name><value>4</value></property>
+<property><name>ipc.server.listen.queue.size</name><value>128</value></property>
+<property><name>fs.trash.interval</name><value>0</value></property>
+<property><name>mapred.local.dir.minspacestart</name><value>0</value></property>
+<property><name>fs.s3.impl</name><value>org.apache.hadoop.fs.s3.S3FileSystem</value></property>
+<property><name>io.seqfile.sorter.recordlimit</name><value>1000000</value></property>
+<property><name>io.mapfile.bloom.size</name><value>1048576</value></property>
+<property><name>io.sort.mb</name><value>100</value></property>
+<property><name>mapred.local.dir</name><value>${hadoop.tmp.dir}/mapred/local</value></property>
+<property><name>io.sort.factor</name><value>10</value></property>
+<property><name>mapred.task.profile</name><value>false</value></property>
+<property><name>job.end.retry.interval</name><value>30000</value></property>
+<property><name>mapred.tasktracker.procfsbasedprocesstree.sleeptime-before-sigkill</name><value>5000</value></property>
+<property><name>mapred.jobtracker.completeuserjobs.maximum</name><value>100</value></property>
+<property><name>mapred.task.profile.reduces</name><value>0-2</value></property>
+<property><name>webinterface.private.actions</name><value>false</value></property>
+<property><name>hadoop.tmp.dir</name><value>/tmp/hadoop-${user.name}</value></property>
+<property><name>mapred.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.skip.attempts.to.start.skipping</name><value>2</value></property>
+<property><name>mapred.temp.dir</name><value>${hadoop.tmp.dir}/mapred/temp</value></property>
+<property><name>mapred.merge.recordsBeforeProgress</name><value>10000</value></property>
+<property><name>mapred.map.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.compress.map.output</name><value>false</value></property>
+<property><name>io.sort.spill.percent</name><value>0.80</value></property>
+<property><name>fs.checkpoint.edits.dir</name><value>${fs.checkpoint.dir}</value></property>
+<property><name>mapred.userlog.retain.hours</name><value>24</value></property>
+<property><name>mapred.system.dir</name><value>${hadoop.tmp.dir}/mapred/system</value></property>
+<property><name>mapred.line.input.format.linespermap</name><value>1</value></property>
+<property><name>job.end.retry.attempts</name><value>0</value></property>
+<property><name>ipc.client.idlethreshold</name><value>4000</value></property>
+<property><name>pregelix.vertexOutputFormatClass</name><value>edu.uci.ics.pregelix.example.maximalclique.MaximalCliqueVertex$MaximalCliqueVertexOutputFormat</value></property>
+<property><name>mapred.reduce.copy.backoff</name><value>300</value></property>
+<property><name>mapred.map.tasks.speculative.execution</name><value>true</value></property>
+<property><name>mapred.inmem.merge.threshold</name><value>1000</value></property>
+<property><name>hadoop.logfile.size</name><value>10000000</value></property>
+<property><name>pregelix.vertexInputFormatClass</name><value>edu.uci.ics.pregelix.example.maximalclique.TextMaximalCliqueInputFormat</value></property>
+<property><name>pregelix.aggregatorClass</name><value>edu.uci.ics.pregelix.example.maximalclique.MaximalCliqueAggregator</value></property>
+<property><name>mapred.job.queue.name</name><value>default</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.active</name><value>false</value></property>
+<property><name>pregelix.incStateLength</name><value>true</value></property>
+<property><name>mapred.reduce.slowstart.completed.maps</name><value>0.05</value></property>
+<property><name>topology.script.number.args</name><value>100</value></property>
+<property><name>mapred.skip.map.max.skip.records</name><value>0</value></property>
+<property><name>fs.ftp.impl</name><value>org.apache.hadoop.fs.ftp.FTPFileSystem</value></property>
+<property><name>mapred.task.cache.levels</name><value>2</value></property>
+<property><name>mapred.job.tracker.handler.count</name><value>10</value></property>
+<property><name>io.serializations</name><value>org.apache.hadoop.io.serializer.WritableSerialization</value></property>
+<property><name>ipc.client.connect.max.retries</name><value>10</value></property>
+<property><name>mapred.min.split.size</name><value>0</value></property>
+<property><name>mapred.map.max.attempts</name><value>4</value></property>
+<property><name>jobclient.output.filter</name><value>FAILED</value></property>
+<property><name>ipc.client.tcpnodelay</name><value>false</value></property>
+<property><name>mapred.acls.enabled</name><value>false</value></property>
+</configuration>
\ No newline at end of file
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/PageRank.xml b/pregelix/pregelix-example/src/test/resources/jobs/PageRank.xml
index e425b38..744e5b0 100644
--- a/pregelix/pregelix-example/src/test/resources/jobs/PageRank.xml
+++ b/pregelix/pregelix-example/src/test/resources/jobs/PageRank.xml
@@ -123,7 +123,7 @@
 <property><name>mapred.map.tasks.speculative.execution</name><value>true</value></property>
 <property><name>mapred.inmem.merge.threshold</name><value>1000</value></property>
 <property><name>hadoop.logfile.size</name><value>10000000</value></property>
-<property><name>pregelix.vertexInputFormatClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex$SimplePageRankVertexInputFormat</value></property>
+<property><name>pregelix.vertexInputFormatClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex$SimulatedPageRankVertexInputFormat</value></property>
 <property><name>mapred.job.queue.name</name><value>default</value></property>
 <property><name>mapred.job.tracker.persist.jobstatus.active</name><value>false</value></property>
 <property><name>mapred.reduce.slowstart.completed.maps</name><value>0.05</value></property>
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/PageRankRealDynamic.xml b/pregelix/pregelix-example/src/test/resources/jobs/PageRankRealDynamic.xml
new file mode 100644
index 0000000..c1a04ae
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/jobs/PageRankRealDynamic.xml
@@ -0,0 +1,143 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?><configuration>
+<property><name>mapred.tasktracker.dns.nameserver</name><value>default</value></property>
+<property><name>mapred.queue.default.acl-administer-jobs</name><value>*</value></property>
+<property><name>mapred.skip.map.auto.incr.proc.count</name><value>true</value></property>
+<property><name>mapred.jobtracker.instrumentation</name><value>org.apache.hadoop.mapred.JobTrackerMetricsInst</value></property>
+<property><name>mapred.skip.reduce.auto.incr.proc.count</name><value>true</value></property>
+<property><name>fs.hsftp.impl</name><value>org.apache.hadoop.hdfs.HsftpFileSystem</value></property>
+<property><name>mapred.input.dir</name><value>file:/webmap</value></property>
+<property><name>mapred.submit.replication</name><value>10</value></property>
+<property><name>ipc.server.tcpnodelay</name><value>false</value></property>
+<property><name>fs.checkpoint.dir</name><value>${hadoop.tmp.dir}/dfs/namesecondary</value></property>
+<property><name>mapred.output.compression.type</name><value>RECORD</value></property>
+<property><name>mapred.job.shuffle.merge.percent</name><value>0.66</value></property>
+<property><name>mapred.child.java.opts</name><value>-Xmx200m</value></property>
+<property><name>mapred.queue.default.acl-submit-job</name><value>*</value></property>
+<property><name>keep.failed.task.files</name><value>false</value></property>
+<property><name>mapred.jobtracker.job.history.block.size</name><value>3145728</value></property>
+<property><name>io.bytes.per.checksum</name><value>512</value></property>
+<property><name>mapred.task.tracker.report.address</name><value>127.0.0.1:0</value></property>
+<property><name>hadoop.util.hash.type</name><value>murmur</value></property>
+<property><name>fs.hdfs.impl</name><value>org.apache.hadoop.hdfs.DistributedFileSystem</value></property>
+<property><name>fs.ramfs.impl</name><value>org.apache.hadoop.fs.InMemoryFileSystem</value></property>
+<property><name>mapred.jobtracker.restart.recover</name><value>false</value></property>
+<property><name>fs.hftp.impl</name><value>org.apache.hadoop.hdfs.HftpFileSystem</value></property>
+<property><name>fs.checkpoint.period</name><value>3600</value></property>
+<property><name>mapred.child.tmp</name><value>./tmp</value></property>
+<property><name>mapred.local.dir.minspacekill</name><value>0</value></property>
+<property><name>map.sort.class</name><value>org.apache.hadoop.util.QuickSort</value></property>
+<property><name>hadoop.logfile.count</name><value>10</value></property>
+<property><name>ipc.client.connection.maxidletime</name><value>10000</value></property>
+<property><name>mapred.output.dir</name><value>/result</value></property>
+<property><name>io.map.index.skip</name><value>0</value></property>
+<property><name>mapred.tasktracker.expiry.interval</name><value>600000</value></property>
+<property><name>mapred.output.compress</name><value>false</value></property>
+<property><name>io.seqfile.lazydecompress</name><value>true</value></property>
+<property><name>mapred.reduce.parallel.copies</name><value>5</value></property>
+<property><name>fs.checkpoint.size</name><value>67108864</value></property>
+<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
+<property><name>mapred.job.name</name><value>PageRank</value></property>
+<property><name>local.cache.size</name><value>10737418240</value></property>
+<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
+<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
+<property><name>fs.file.impl</name><value>org.apache.hadoop.fs.LocalFileSystem</value></property>
+<property><name>mapred.task.tracker.http.address</name><value>0.0.0.0:50060</value></property>
+<property><name>mapred.task.timeout</name><value>600000</value></property>
+<property><name>fs.kfs.impl</name><value>org.apache.hadoop.fs.kfs.KosmosFileSystem</value></property>
+<property><name>mapred.max.tracker.blacklists</name><value>4</value></property>
+<property><name>fs.s3.buffer.dir</name><value>${hadoop.tmp.dir}/s3</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.dir</name><value>/jobtracker/jobsInfo</value></property>
+<property><name>ipc.client.kill.max</name><value>10</value></property>
+<property><name>mapred.tasktracker.instrumentation</name><value>org.apache.hadoop.mapred.TaskTrackerMetricsInst</value></property>
+<property><name>mapred.reduce.tasks.speculative.execution</name><value>true</value></property>
+<property><name>io.sort.record.percent</name><value>0.05</value></property>
+<property><name>hadoop.security.authorization</name><value>false</value></property>
+<property><name>mapred.max.tracker.failures</name><value>4</value></property>
+<property><name>mapred.jobtracker.taskScheduler</name><value>org.apache.hadoop.mapred.JobQueueTaskScheduler</value></property>
+<property><name>pregelix.numVertices</name><value>20</value></property>
+<property><name>mapred.tasktracker.dns.interface</name><value>default</value></property>
+<property><name>mapred.map.tasks</name><value>2</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.hours</name><value>0</value></property>
+<property><name>fs.s3.sleepTimeSeconds</name><value>10</value></property>
+<property><name>fs.default.name</name><value>file:///</value></property>
+<property><name>tasktracker.http.threads</name><value>40</value></property>
+<property><name>mapred.tasktracker.taskmemorymanager.monitoring-interval</name><value>5000</value></property>
+<property><name>hadoop.rpc.socket.factory.class.default</name><value>org.apache.hadoop.net.StandardSocketFactory</value></property>
+<property><name>mapred.reduce.tasks</name><value>1</value></property>
+<property><name>topology.node.switch.mapping.impl</name><value>org.apache.hadoop.net.ScriptBasedMapping</value></property>
+<property><name>pregelix.vertexClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex</value></property>
+<property><name>mapred.skip.reduce.max.skip.groups</name><value>0</value></property>
+<property><name>io.file.buffer.size</name><value>4096</value></property>
+<property><name>mapred.jobtracker.maxtasks.per.job</name><value>-1</value></property>
+<property><name>mapred.tasktracker.indexcache.mb</name><value>10</value></property>
+<property><name>mapred.tasktracker.map.tasks.maximum</name><value>2</value></property>
+<property><name>fs.har.impl.disable.cache</name><value>true</value></property>
+<property><name>mapred.task.profile.maps</name><value>0-2</value></property>
+<property><name>hadoop.native.lib</name><value>true</value></property>
+<property><name>fs.s3.block.size</name><value>67108864</value></property>
+<property><name>mapred.job.reuse.jvm.num.tasks</name><value>1</value></property>
+<property><name>mapred.job.tracker.http.address</name><value>0.0.0.0:50030</value></property>
+<property><name>mapred.tasktracker.reduce.tasks.maximum</name><value>2</value></property>
+<property><name>io.compression.codecs</name><value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec</value></property>
+<property><name>mapred.job.shuffle.input.buffer.percent</name><value>0.70</value></property>
+<property><name>io.seqfile.compress.blocksize</name><value>1000000</value></property>
+<property><name>mapred.queue.names</name><value>default</value></property>
+<property><name>fs.har.impl</name><value>org.apache.hadoop.fs.HarFileSystem</value></property>
+<property><name>io.mapfile.bloom.error.rate</name><value>0.005</value></property>
+<property><name>mapred.job.tracker</name><value>local</value></property>
+<property><name>io.skip.checksum.errors</name><value>false</value></property>
+<property><name>mapred.reduce.max.attempts</name><value>4</value></property>
+<property><name>fs.s3.maxRetries</name><value>4</value></property>
+<property><name>ipc.server.listen.queue.size</name><value>128</value></property>
+<property><name>fs.trash.interval</name><value>0</value></property>
+<property><name>mapred.local.dir.minspacestart</name><value>0</value></property>
+<property><name>fs.s3.impl</name><value>org.apache.hadoop.fs.s3.S3FileSystem</value></property>
+<property><name>io.seqfile.sorter.recordlimit</name><value>1000000</value></property>
+<property><name>io.mapfile.bloom.size</name><value>1048576</value></property>
+<property><name>io.sort.mb</name><value>100</value></property>
+<property><name>mapred.local.dir</name><value>${hadoop.tmp.dir}/mapred/local</value></property>
+<property><name>io.sort.factor</name><value>10</value></property>
+<property><name>mapred.task.profile</name><value>false</value></property>
+<property><name>job.end.retry.interval</name><value>30000</value></property>
+<property><name>mapred.tasktracker.procfsbasedprocesstree.sleeptime-before-sigkill</name><value>5000</value></property>
+<property><name>mapred.jobtracker.completeuserjobs.maximum</name><value>100</value></property>
+<property><name>mapred.task.profile.reduces</name><value>0-2</value></property>
+<property><name>webinterface.private.actions</name><value>false</value></property>
+<property><name>hadoop.tmp.dir</name><value>/tmp/hadoop-${user.name}</value></property>
+<property><name>pregelix.combinerClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex$SimpleSumCombiner</value></property>
+<property><name>mapred.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.skip.attempts.to.start.skipping</name><value>2</value></property>
+<property><name>mapred.temp.dir</name><value>${hadoop.tmp.dir}/mapred/temp</value></property>
+<property><name>mapred.merge.recordsBeforeProgress</name><value>10000</value></property>
+<property><name>mapred.map.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.compress.map.output</name><value>false</value></property>
+<property><name>io.sort.spill.percent</name><value>0.80</value></property>
+<property><name>fs.checkpoint.edits.dir</name><value>${fs.checkpoint.dir}</value></property>
+<property><name>mapred.userlog.retain.hours</name><value>24</value></property>
+<property><name>mapred.system.dir</name><value>${hadoop.tmp.dir}/mapred/system</value></property>
+<property><name>mapred.line.input.format.linespermap</name><value>1</value></property>
+<property><name>job.end.retry.attempts</name><value>0</value></property>
+<property><name>ipc.client.idlethreshold</name><value>4000</value></property>
+<property><name>pregelix.vertexOutputFormatClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex$SimplePageRankVertexOutputFormat</value></property>
+<property><name>mapred.reduce.copy.backoff</name><value>300</value></property>
+<property><name>mapred.map.tasks.speculative.execution</name><value>true</value></property>
+<property><name>mapred.inmem.merge.threshold</name><value>1000</value></property>
+<property><name>hadoop.logfile.size</name><value>10000000</value></property>
+<property><name>pregelix.vertexInputFormatClass</name><value>edu.uci.ics.pregelix.example.inputformat.TextPageRankInputFormat</value></property>
+<property><name>mapred.job.queue.name</name><value>default</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.active</name><value>false</value></property>
+<property><name>pregelix.incStateLength</name><value>true</value></property>
+<property><name>mapred.reduce.slowstart.completed.maps</name><value>0.05</value></property>
+<property><name>topology.script.number.args</name><value>100</value></property>
+<property><name>mapred.skip.map.max.skip.records</name><value>0</value></property>
+<property><name>fs.ftp.impl</name><value>org.apache.hadoop.fs.ftp.FTPFileSystem</value></property>
+<property><name>mapred.task.cache.levels</name><value>2</value></property>
+<property><name>mapred.job.tracker.handler.count</name><value>10</value></property>
+<property><name>io.serializations</name><value>org.apache.hadoop.io.serializer.WritableSerialization</value></property>
+<property><name>ipc.client.connect.max.retries</name><value>10</value></property>
+<property><name>mapred.min.split.size</name><value>0</value></property>
+<property><name>mapred.map.max.attempts</name><value>4</value></property>
+<property><name>jobclient.output.filter</name><value>FAILED</value></property>
+<property><name>ipc.client.tcpnodelay</name><value>false</value></property>
+<property><name>mapred.acls.enabled</name><value>false</value></property>
+</configuration>
\ No newline at end of file
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/ShortestPaths.xml b/pregelix/pregelix-example/src/test/resources/jobs/ShortestPaths.xml
index 3719247..9e791e2 100644
--- a/pregelix/pregelix-example/src/test/resources/jobs/ShortestPaths.xml
+++ b/pregelix/pregelix-example/src/test/resources/jobs/ShortestPaths.xml
@@ -124,7 +124,7 @@
 <property><name>mapred.map.tasks.speculative.execution</name><value>true</value></property>
 <property><name>mapred.inmem.merge.threshold</name><value>1000</value></property>
 <property><name>hadoop.logfile.size</name><value>10000000</value></property>
-<property><name>pregelix.vertexInputFormatClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex$SimplePageRankVertexInputFormat</value></property>
+<property><name>pregelix.vertexInputFormatClass</name><value>edu.uci.ics.pregelix.example.PageRankVertex$SimulatedPageRankVertexInputFormat</value></property>
 <property><name>mapred.job.queue.name</name><value>default</value></property>
 <property><name>mapred.job.tracker.persist.jobstatus.active</name><value>false</value></property>
 <property><name>mapred.reduce.slowstart.completed.maps</name><value>0.05</value></property>
diff --git a/pregelix/pregelix-example/src/test/resources/jobs/TriangleCounting.xml b/pregelix/pregelix-example/src/test/resources/jobs/TriangleCounting.xml
new file mode 100644
index 0000000..ee2acc1
--- /dev/null
+++ b/pregelix/pregelix-example/src/test/resources/jobs/TriangleCounting.xml
@@ -0,0 +1,141 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?><configuration>
+<property><name>mapred.tasktracker.dns.nameserver</name><value>default</value></property>
+<property><name>mapred.queue.default.acl-administer-jobs</name><value>*</value></property>
+<property><name>mapred.skip.map.auto.incr.proc.count</name><value>true</value></property>
+<property><name>mapred.jobtracker.instrumentation</name><value>org.apache.hadoop.mapred.JobTrackerMetricsInst</value></property>
+<property><name>mapred.skip.reduce.auto.incr.proc.count</name><value>true</value></property>
+<property><name>fs.hsftp.impl</name><value>org.apache.hadoop.hdfs.HsftpFileSystem</value></property>
+<property><name>mapred.input.dir</name><value>file:/clique</value></property>
+<property><name>mapred.submit.replication</name><value>10</value></property>
+<property><name>ipc.server.tcpnodelay</name><value>false</value></property>
+<property><name>fs.checkpoint.dir</name><value>${hadoop.tmp.dir}/dfs/namesecondary</value></property>
+<property><name>mapred.output.compression.type</name><value>RECORD</value></property>
+<property><name>mapred.job.shuffle.merge.percent</name><value>0.66</value></property>
+<property><name>mapred.child.java.opts</name><value>-Xmx200m</value></property>
+<property><name>mapred.queue.default.acl-submit-job</name><value>*</value></property>
+<property><name>keep.failed.task.files</name><value>false</value></property>
+<property><name>mapred.jobtracker.job.history.block.size</name><value>3145728</value></property>
+<property><name>io.bytes.per.checksum</name><value>512</value></property>
+<property><name>mapred.task.tracker.report.address</name><value>127.0.0.1:0</value></property>
+<property><name>hadoop.util.hash.type</name><value>murmur</value></property>
+<property><name>fs.hdfs.impl</name><value>org.apache.hadoop.hdfs.DistributedFileSystem</value></property>
+<property><name>fs.ramfs.impl</name><value>org.apache.hadoop.fs.InMemoryFileSystem</value></property>
+<property><name>mapred.jobtracker.restart.recover</name><value>false</value></property>
+<property><name>fs.hftp.impl</name><value>org.apache.hadoop.hdfs.HftpFileSystem</value></property>
+<property><name>fs.checkpoint.period</name><value>3600</value></property>
+<property><name>mapred.child.tmp</name><value>./tmp</value></property>
+<property><name>mapred.local.dir.minspacekill</name><value>0</value></property>
+<property><name>map.sort.class</name><value>org.apache.hadoop.util.QuickSort</value></property>
+<property><name>hadoop.logfile.count</name><value>10</value></property>
+<property><name>ipc.client.connection.maxidletime</name><value>10000</value></property>
+<property><name>mapred.output.dir</name><value>/resultclique</value></property>
+<property><name>io.map.index.skip</name><value>0</value></property>
+<property><name>mapred.tasktracker.expiry.interval</name><value>600000</value></property>
+<property><name>mapred.output.compress</name><value>false</value></property>
+<property><name>io.seqfile.lazydecompress</name><value>true</value></property>
+<property><name>mapred.reduce.parallel.copies</name><value>5</value></property>
+<property><name>fs.checkpoint.size</name><value>67108864</value></property>
+<property><name>mapred.job.reduce.input.buffer.percent</name><value>0.0</value></property>
+<property><name>mapred.job.name</name><value>Triangle Counting</value></property>
+<property><name>local.cache.size</name><value>10737418240</value></property>
+<property><name>fs.s3n.impl</name><value>org.apache.hadoop.fs.s3native.NativeS3FileSystem</value></property>
+<property><name>mapred.userlog.limit.kb</name><value>0</value></property>
+<property><name>fs.file.impl</name><value>org.apache.hadoop.fs.LocalFileSystem</value></property>
+<property><name>mapred.task.tracker.http.address</name><value>0.0.0.0:50060</value></property>
+<property><name>mapred.task.timeout</name><value>600000</value></property>
+<property><name>fs.kfs.impl</name><value>org.apache.hadoop.fs.kfs.KosmosFileSystem</value></property>
+<property><name>mapred.max.tracker.blacklists</name><value>4</value></property>
+<property><name>fs.s3.buffer.dir</name><value>${hadoop.tmp.dir}/s3</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.dir</name><value>/jobtracker/jobsInfo</value></property>
+<property><name>ipc.client.kill.max</name><value>10</value></property>
+<property><name>mapred.tasktracker.instrumentation</name><value>org.apache.hadoop.mapred.TaskTrackerMetricsInst</value></property>
+<property><name>mapred.reduce.tasks.speculative.execution</name><value>true</value></property>
+<property><name>io.sort.record.percent</name><value>0.05</value></property>
+<property><name>hadoop.security.authorization</name><value>false</value></property>
+<property><name>mapred.max.tracker.failures</name><value>4</value></property>
+<property><name>mapred.jobtracker.taskScheduler</name><value>org.apache.hadoop.mapred.JobQueueTaskScheduler</value></property>
+<property><name>mapred.tasktracker.dns.interface</name><value>default</value></property>
+<property><name>mapred.map.tasks</name><value>2</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.hours</name><value>0</value></property>
+<property><name>fs.s3.sleepTimeSeconds</name><value>10</value></property>
+<property><name>fs.default.name</name><value>file:///</value></property>
+<property><name>tasktracker.http.threads</name><value>40</value></property>
+<property><name>mapred.tasktracker.taskmemorymanager.monitoring-interval</name><value>5000</value></property>
+<property><name>hadoop.rpc.socket.factory.class.default</name><value>org.apache.hadoop.net.StandardSocketFactory</value></property>
+<property><name>mapred.reduce.tasks</name><value>1</value></property>
+<property><name>topology.node.switch.mapping.impl</name><value>org.apache.hadoop.net.ScriptBasedMapping</value></property>
+<property><name>pregelix.vertexClass</name><value>edu.uci.ics.pregelix.example.trianglecounting.TriangleCountingVertex</value></property>
+<property><name>mapred.skip.reduce.max.skip.groups</name><value>0</value></property>
+<property><name>io.file.buffer.size</name><value>4096</value></property>
+<property><name>mapred.jobtracker.maxtasks.per.job</name><value>-1</value></property>
+<property><name>mapred.tasktracker.indexcache.mb</name><value>10</value></property>
+<property><name>mapred.tasktracker.map.tasks.maximum</name><value>2</value></property>
+<property><name>fs.har.impl.disable.cache</name><value>true</value></property>
+<property><name>mapred.task.profile.maps</name><value>0-2</value></property>
+<property><name>hadoop.native.lib</name><value>true</value></property>
+<property><name>fs.s3.block.size</name><value>67108864</value></property>
+<property><name>mapred.job.reuse.jvm.num.tasks</name><value>1</value></property>
+<property><name>mapred.job.tracker.http.address</name><value>0.0.0.0:50030</value></property>
+<property><name>mapred.tasktracker.reduce.tasks.maximum</name><value>2</value></property>
+<property><name>io.compression.codecs</name><value>org.apache.hadoop.io.compress.DefaultCodec,org.apache.hadoop.io.compress.GzipCodec,org.apache.hadoop.io.compress.BZip2Codec</value></property>
+<property><name>mapred.job.shuffle.input.buffer.percent</name><value>0.70</value></property>
+<property><name>io.seqfile.compress.blocksize</name><value>1000000</value></property>
+<property><name>mapred.queue.names</name><value>default</value></property>
+<property><name>fs.har.impl</name><value>org.apache.hadoop.fs.HarFileSystem</value></property>
+<property><name>io.mapfile.bloom.error.rate</name><value>0.005</value></property>
+<property><name>mapred.job.tracker</name><value>local</value></property>
+<property><name>io.skip.checksum.errors</name><value>false</value></property>
+<property><name>mapred.reduce.max.attempts</name><value>4</value></property>
+<property><name>fs.s3.maxRetries</name><value>4</value></property>
+<property><name>ipc.server.listen.queue.size</name><value>128</value></property>
+<property><name>fs.trash.interval</name><value>0</value></property>
+<property><name>mapred.local.dir.minspacestart</name><value>0</value></property>
+<property><name>fs.s3.impl</name><value>org.apache.hadoop.fs.s3.S3FileSystem</value></property>
+<property><name>io.seqfile.sorter.recordlimit</name><value>1000000</value></property>
+<property><name>io.mapfile.bloom.size</name><value>1048576</value></property>
+<property><name>io.sort.mb</name><value>100</value></property>
+<property><name>mapred.local.dir</name><value>${hadoop.tmp.dir}/mapred/local</value></property>
+<property><name>io.sort.factor</name><value>10</value></property>
+<property><name>mapred.task.profile</name><value>false</value></property>
+<property><name>job.end.retry.interval</name><value>30000</value></property>
+<property><name>mapred.tasktracker.procfsbasedprocesstree.sleeptime-before-sigkill</name><value>5000</value></property>
+<property><name>mapred.jobtracker.completeuserjobs.maximum</name><value>100</value></property>
+<property><name>mapred.task.profile.reduces</name><value>0-2</value></property>
+<property><name>webinterface.private.actions</name><value>false</value></property>
+<property><name>hadoop.tmp.dir</name><value>/tmp/hadoop-${user.name}</value></property>
+<property><name>mapred.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.skip.attempts.to.start.skipping</name><value>2</value></property>
+<property><name>mapred.temp.dir</name><value>${hadoop.tmp.dir}/mapred/temp</value></property>
+<property><name>mapred.merge.recordsBeforeProgress</name><value>10000</value></property>
+<property><name>mapred.map.output.compression.codec</name><value>org.apache.hadoop.io.compress.DefaultCodec</value></property>
+<property><name>mapred.compress.map.output</name><value>false</value></property>
+<property><name>io.sort.spill.percent</name><value>0.80</value></property>
+<property><name>fs.checkpoint.edits.dir</name><value>${fs.checkpoint.dir}</value></property>
+<property><name>mapred.userlog.retain.hours</name><value>24</value></property>
+<property><name>mapred.system.dir</name><value>${hadoop.tmp.dir}/mapred/system</value></property>
+<property><name>mapred.line.input.format.linespermap</name><value>1</value></property>
+<property><name>job.end.retry.attempts</name><value>0</value></property>
+<property><name>ipc.client.idlethreshold</name><value>4000</value></property>
+<property><name>pregelix.vertexOutputFormatClass</name><value>edu.uci.ics.pregelix.example.trianglecounting.TriangleCountingVertex$TriangleCountingVertexOutputFormat</value></property>
+<property><name>mapred.reduce.copy.backoff</name><value>300</value></property>
+<property><name>mapred.map.tasks.speculative.execution</name><value>true</value></property>
+<property><name>mapred.inmem.merge.threshold</name><value>1000</value></property>
+<property><name>hadoop.logfile.size</name><value>10000000</value></property>
+<property><name>pregelix.vertexInputFormatClass</name><value>edu.uci.ics.pregelix.example.inputformat.TextPageRankInputFormat</value></property>
+<property><name>pregelix.aggregatorClass</name><value>edu.uci.ics.pregelix.example.trianglecounting.TriangleCountingAggregator</value></property>
+<property><name>mapred.job.queue.name</name><value>default</value></property>
+<property><name>mapred.job.tracker.persist.jobstatus.active</name><value>false</value></property>
+<property><name>mapred.reduce.slowstart.completed.maps</name><value>0.05</value></property>
+<property><name>topology.script.number.args</name><value>100</value></property>
+<property><name>mapred.skip.map.max.skip.records</name><value>0</value></property>
+<property><name>fs.ftp.impl</name><value>org.apache.hadoop.fs.ftp.FTPFileSystem</value></property>
+<property><name>mapred.task.cache.levels</name><value>2</value></property>
+<property><name>mapred.job.tracker.handler.count</name><value>10</value></property>
+<property><name>io.serializations</name><value>org.apache.hadoop.io.serializer.WritableSerialization</value></property>
+<property><name>ipc.client.connect.max.retries</name><value>10</value></property>
+<property><name>mapred.min.split.size</name><value>0</value></property>
+<property><name>mapred.map.max.attempts</name><value>4</value></property>
+<property><name>jobclient.output.filter</name><value>FAILED</value></property>
+<property><name>ipc.client.tcpnodelay</name><value>false</value></property>
+<property><name>mapred.acls.enabled</name><value>false</value></property>
+</configuration>
\ No newline at end of file