Merge branch 'master' into zheilbron/asterix_msr_demo

Conflicts:
	asterix-algebra/src/main/java/edu/uci/ics/asterix/optimizer/rules/RemoveUnusedOneToOneEquiJoinRule.java
	asterix-app/src/test/resources/metadata/results/basic/meta17.adm
	asterix-app/src/test/resources/metadata/results/basic/meta17/meta17.1.adm
	asterix-app/src/test/resources/metadata/results/basic/metadata_datatype.adm
	asterix-app/src/test/resources/metadata/results/basic/metadata_datatype/metadata_datatype.1.adm
	asterix-app/src/test/resources/runtimets/testsuite.xml
	asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixExternalProperties.java
	asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataManager.java
	asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/ConcurrentLockManager.java
diff --git a/asterix-app/data/csv/fragile_02.adm b/asterix-app/data/csv/fragile_02.adm
new file mode 100644
index 0000000..92b4477
--- /dev/null
+++ b/asterix-app/data/csv/fragile_02.adm
Binary files differ
diff --git a/asterix-app/data/twitter/tw_messages_100.adm b/asterix-app/data/twitter/tw_messages_100.adm
new file mode 100644
index 0000000..f60be3e
--- /dev/null
+++ b/asterix-app/data/twitter/tw_messages_100.adm
@@ -0,0 +1,100 @@
+{ "tweetid": 1i64, "user": { "screen-name": "EdwardLeslie#333", "lang": "en", "friends_count": 31, "statuses_count": 107, "name": "Edward Leslie", "followers_count": 80 }, "sender-location": point("29.37,78.8"), "send-time": datetime("2005-10-14T10:10:00.000Z"), "referred-topics": {{ "at&t", "network" }}, "message-text": " can't stand at&t the network is terrible:(" }
+{ "tweetid": 2i64, "user": { "screen-name": "PenniBauerle$865", "lang": "en", "friends_count": 32, "statuses_count": 308, "name": "Penni Bauerle", "followers_count": 97 }, "sender-location": point("37.99,83.51"), "send-time": datetime("2011-09-23T10:10:00.000Z"), "referred-topics": {{ "iphone", "plan" }}, "message-text": " love iphone its plan is awesome" }
+{ "tweetid": 3i64, "user": { "screen-name": "TrudiSaline$17", "lang": "en", "friends_count": 2, "statuses_count": 248, "name": "Trudi Saline", "followers_count": 154 }, "sender-location": point("48.17,93.4"), "send-time": datetime("2007-07-02T10:10:00.000Z"), "referred-topics": {{ "sprint", "3G" }}, "message-text": " like sprint its 3G is good:)" }
+{ "tweetid": 4i64, "user": { "screen-name": "EdytheMurray#502", "lang": "en", "friends_count": 23, "statuses_count": 142, "name": "Edythe Murray", "followers_count": 164 }, "sender-location": point("24.63,90.02"), "send-time": datetime("2008-03-16T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "voice-clarity" }}, "message-text": " like t-mobile the voice-clarity is good:)" }
+{ "tweetid": 5i64, "user": { "screen-name": "CoralMoon#517", "lang": "en", "friends_count": 35, "statuses_count": 3, "name": "Coral Moon", "followers_count": 67 }, "sender-location": point("32.05,75.79"), "send-time": datetime("2006-02-18T10:10:00.000Z"), "referred-topics": {{ "samsung", "touch-screen" }}, "message-text": " love samsung the touch-screen is mind-blowing" }
+{ "tweetid": 6i64, "user": { "screen-name": "CarriePinney#881", "lang": "en", "friends_count": 77, "statuses_count": 113, "name": "Carrie Pinney", "followers_count": 120 }, "sender-location": point("45.72,93.27"), "send-time": datetime("2011-12-02T10:10:00.000Z"), "referred-topics": {{ "sprint", "speed" }}, "message-text": " love sprint its speed is awesome:)" }
+{ "tweetid": 7i64, "user": { "screen-name": "AmadoTomey_367", "lang": "en", "friends_count": 28, "statuses_count": 379, "name": "Amado Tomey", "followers_count": 119 }, "sender-location": point("43.0,96.53"), "send-time": datetime("2011-07-04T10:10:00.000Z"), "referred-topics": {{ "verizon", "platform" }}, "message-text": " hate verizon its platform is OMG:(" }
+{ "tweetid": 8i64, "user": { "screen-name": "OdellWallace#398", "lang": "en", "friends_count": 10, "statuses_count": 89, "name": "Odell Wallace", "followers_count": 4 }, "sender-location": point("28.61,90.69"), "send-time": datetime("2012-01-09T10:10:00.000Z"), "referred-topics": {{ "motorola", "signal" }}, "message-text": " love motorola its signal is amazing:)" }
+{ "tweetid": 9i64, "user": { "screen-name": "NickLing#80", "lang": "en", "friends_count": 99, "statuses_count": 291, "name": "Nick Ling", "followers_count": 144 }, "sender-location": point("33.59,71.74"), "send-time": datetime("2011-05-14T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "speed" }}, "message-text": " hate t-mobile the speed is horrible:(" }
+{ "tweetid": 10i64, "user": { "screen-name": "MickeyDunkle_962", "lang": "en", "friends_count": 46, "statuses_count": 429, "name": "Mickey Dunkle", "followers_count": 110 }, "sender-location": point("28.72,70.51"), "send-time": datetime("2006-05-02T10:10:00.000Z"), "referred-topics": {{ "at&t", "reachability" }}, "message-text": " can't stand at&t its reachability is OMG:(" }
+{ "tweetid": 11i64, "user": { "screen-name": "AlaynaKnopsnider$684", "lang": "en", "friends_count": 70, "statuses_count": 425, "name": "Alayna Knopsnider", "followers_count": 106 }, "sender-location": point("35.4,69.61"), "send-time": datetime("2012-08-15T10:10:00.000Z"), "referred-topics": {{ "sprint", "voice-command" }}, "message-text": " dislike sprint the voice-command is bad" }
+{ "tweetid": 12i64, "user": { "screen-name": "SeraphinaWall_37", "lang": "en", "friends_count": 34, "statuses_count": 43, "name": "Seraphina Wall", "followers_count": 101 }, "sender-location": point("27.83,95.15"), "send-time": datetime("2010-02-08T10:10:00.000Z"), "referred-topics": {{ "motorola", "signal" }}, "message-text": " like motorola its signal is amazing:)" }
+{ "tweetid": 13i64, "user": { "screen-name": "TonyaKnopsnider#342", "lang": "en", "friends_count": 96, "statuses_count": 479, "name": "Tonya Knopsnider", "followers_count": 105 }, "sender-location": point("27.95,74.39"), "send-time": datetime("2008-05-26T10:10:00.000Z"), "referred-topics": {{ "motorola", "voicemail-service" }}, "message-text": " dislike motorola its voicemail-service is bad" }
+{ "tweetid": 14i64, "user": { "screen-name": "SkylerStough#713", "lang": "en", "friends_count": 29, "statuses_count": 41, "name": "Skyler Stough", "followers_count": 118 }, "sender-location": point("39.72,68.97"), "send-time": datetime("2012-05-21T10:10:00.000Z"), "referred-topics": {{ "iphone", "3G" }}, "message-text": " love iphone its 3G is awesome:)" }
+{ "tweetid": 15i64, "user": { "screen-name": "IrisMillard$830", "lang": "en", "friends_count": 9, "statuses_count": 56, "name": "Iris Millard", "followers_count": 127 }, "sender-location": point("27.59,95.34"), "send-time": datetime("2010-02-07T10:10:00.000Z"), "referred-topics": {{ "sprint", "voice-clarity" }}, "message-text": " like sprint the voice-clarity is amazing" }
+{ "tweetid": 16i64, "user": { "screen-name": "KaylynBrinigh_817", "lang": "en", "friends_count": 11, "statuses_count": 448, "name": "Kaylyn Brinigh", "followers_count": 53 }, "sender-location": point("25.19,79.71"), "send-time": datetime("2005-04-06T10:10:00.000Z"), "referred-topics": {{ "samsung", "customization" }}, "message-text": " love samsung its customization is amazing:)" }
+{ "tweetid": 17i64, "user": { "screen-name": "SungHoopengarner#732", "lang": "en", "friends_count": 55, "statuses_count": 129, "name": "Sung Hoopengarner", "followers_count": 152 }, "sender-location": point("47.75,93.12"), "send-time": datetime("2010-01-04T10:10:00.000Z"), "referred-topics": {{ "motorola", "voice-command" }}, "message-text": " dislike motorola its voice-command is horrible:(" }
+{ "tweetid": 18i64, "user": { "screen-name": "RenatoRyals_261", "lang": "en", "friends_count": 46, "statuses_count": 439, "name": "Renato Ryals", "followers_count": 73 }, "sender-location": point("38.48,75.0"), "send-time": datetime("2010-04-14T10:10:00.000Z"), "referred-topics": {{ "sprint", "signal" }}, "message-text": " love sprint its signal is good:)" }
+{ "tweetid": 19i64, "user": { "screen-name": "JohnnieHanseu#755", "lang": "en", "friends_count": 84, "statuses_count": 281, "name": "Johnnie Hanseu", "followers_count": 70 }, "sender-location": point("42.75,70.91"), "send-time": datetime("2010-06-12T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "3G" }}, "message-text": " like t-mobile its 3G is mind-blowing:)" }
+{ "tweetid": 20i64, "user": { "screen-name": "LindseyRahl#362", "lang": "en", "friends_count": 27, "statuses_count": 458, "name": "Lindsey Rahl", "followers_count": 24 }, "sender-location": point("36.2,94.8"), "send-time": datetime("2007-01-02T10:10:00.000Z"), "referred-topics": {{ "at&t", "voice-command" }}, "message-text": " can't stand at&t the voice-command is horrible:(" }
+{ "tweetid": 21i64, "user": { "screen-name": "CearaLing$289", "lang": "en", "friends_count": 39, "statuses_count": 177, "name": "Ceara Ling", "followers_count": 40 }, "sender-location": point("39.58,71.28"), "send-time": datetime("2008-05-20T10:10:00.000Z"), "referred-topics": {{ "samsung", "reachability" }}, "message-text": " like samsung the reachability is amazing:)" }
+{ "tweetid": 22i64, "user": { "screen-name": "DomoniqueEisenmann_636", "lang": "en", "friends_count": 27, "statuses_count": 465, "name": "Domonique Eisenmann", "followers_count": 166 }, "sender-location": point("47.11,77.87"), "send-time": datetime("2008-10-24T10:10:00.000Z"), "referred-topics": {{ "sprint", "voice-command" }}, "message-text": " can't stand sprint its voice-command is horrible:(" }
+{ "tweetid": 23i64, "user": { "screen-name": "MelanieGadow$539", "lang": "en", "friends_count": 34, "statuses_count": 112, "name": "Melanie Gadow", "followers_count": 65 }, "sender-location": point("31.9,87.22"), "send-time": datetime("2012-07-26T10:10:00.000Z"), "referred-topics": {{ "sprint", "speed" }}, "message-text": " like sprint its speed is mind-blowing:)" }
+{ "tweetid": 24i64, "user": { "screen-name": "HewiePeters#654", "lang": "en", "friends_count": 8, "statuses_count": 309, "name": "Hewie Peters", "followers_count": 15 }, "sender-location": point("42.84,90.27"), "send-time": datetime("2011-02-09T10:10:00.000Z"), "referred-topics": {{ "at&t", "wireless" }}, "message-text": " hate at&t its wireless is terrible:(" }
+{ "tweetid": 25i64, "user": { "screen-name": "HollisJudge#731", "lang": "en", "friends_count": 58, "statuses_count": 211, "name": "Hollis Judge", "followers_count": 190 }, "sender-location": point("34.33,83.22"), "send-time": datetime("2006-11-21T10:10:00.000Z"), "referred-topics": {{ "samsung", "voice-clarity" }}, "message-text": " dislike samsung its voice-clarity is OMG:(" }
+{ "tweetid": 26i64, "user": { "screen-name": "DemarcusHarrow$822", "lang": "en", "friends_count": 60, "statuses_count": 171, "name": "Demarcus Harrow", "followers_count": 151 }, "sender-location": point("37.01,80.04"), "send-time": datetime("2012-07-19T10:10:00.000Z"), "referred-topics": {{ "at&t", "shortcut-menu" }}, "message-text": " like at&t its shortcut-menu is awesome" }
+{ "tweetid": 27i64, "user": { "screen-name": "OrsonBauerle$52", "lang": "en", "friends_count": 91, "statuses_count": 271, "name": "Orson Bauerle", "followers_count": 144 }, "sender-location": point("48.91,75.54"), "send-time": datetime("2010-02-18T10:10:00.000Z"), "referred-topics": {{ "samsung", "speed" }}, "message-text": " love samsung the speed is amazing:)" }
+{ "tweetid": 28i64, "user": { "screen-name": "ChadBeach#363", "lang": "en", "friends_count": 88, "statuses_count": 275, "name": "Chad Beach", "followers_count": 142 }, "sender-location": point("35.5,73.83"), "send-time": datetime("2007-07-28T10:10:00.000Z"), "referred-topics": {{ "motorola", "signal" }}, "message-text": " love motorola its signal is mind-blowing" }
+{ "tweetid": 29i64, "user": { "screen-name": "LupeNewbern#345", "lang": "en", "friends_count": 99, "statuses_count": 45, "name": "Lupe Newbern", "followers_count": 86 }, "sender-location": point("35.07,70.43"), "send-time": datetime("2010-12-23T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "voicemail-service" }}, "message-text": " love t-mobile its voicemail-service is awesome" }
+{ "tweetid": 30i64, "user": { "screen-name": "LoydJohnston@664", "lang": "en", "friends_count": 86, "statuses_count": 10, "name": "Loyd Johnston", "followers_count": 58 }, "sender-location": point("42.55,72.33"), "send-time": datetime("2010-02-01T10:10:00.000Z"), "referred-topics": {{ "iphone", "network" }}, "message-text": " like iphone the network is awesome" }
+{ "tweetid": 31i64, "user": { "screen-name": "VerityMunson#211", "lang": "en", "friends_count": 75, "statuses_count": 359, "name": "Verity Munson", "followers_count": 165 }, "sender-location": point("30.65,77.21"), "send-time": datetime("2009-01-06T10:10:00.000Z"), "referred-topics": {{ "verizon", "voice-command" }}, "message-text": " can't stand verizon the voice-command is bad:(" }
+{ "tweetid": 32i64, "user": { "screen-name": "RinaHerndon#616", "lang": "en", "friends_count": 19, "statuses_count": 265, "name": "Rina Herndon", "followers_count": 26 }, "sender-location": point("40.76,75.79"), "send-time": datetime("2009-09-19T10:10:00.000Z"), "referred-topics": {{ "verizon", "shortcut-menu" }}, "message-text": " love verizon the shortcut-menu is mind-blowing:)" }
+{ "tweetid": 33i64, "user": { "screen-name": "MadelaineSchreckengost@250", "lang": "en", "friends_count": 45, "statuses_count": 310, "name": "Madelaine Schreckengost", "followers_count": 153 }, "sender-location": point("30.35,66.43"), "send-time": datetime("2005-07-08T10:10:00.000Z"), "referred-topics": {{ "at&t", "plan" }}, "message-text": " can't stand at&t the plan is bad" }
+{ "tweetid": 34i64, "user": { "screen-name": "RadclyffeStaymates_289", "lang": "en", "friends_count": 50, "statuses_count": 188, "name": "Radclyffe Staymates", "followers_count": 97 }, "sender-location": point("45.42,77.18"), "send-time": datetime("2012-05-06T10:10:00.000Z"), "referred-topics": {{ "at&t", "customer-service" }}, "message-text": " hate at&t its customer-service is OMG" }
+{ "tweetid": 35i64, "user": { "screen-name": "VernieAlice$968", "lang": "en", "friends_count": 70, "statuses_count": 491, "name": "Vernie Alice", "followers_count": 193 }, "sender-location": point("28.03,79.37"), "send-time": datetime("2010-01-19T10:10:00.000Z"), "referred-topics": {{ "motorola", "voice-command" }}, "message-text": " can't stand motorola the voice-command is horrible" }
+{ "tweetid": 36i64, "user": { "screen-name": "GertieDugger#987", "lang": "en", "friends_count": 22, "statuses_count": 72, "name": "Gertie Dugger", "followers_count": 12 }, "sender-location": point("25.77,92.7"), "send-time": datetime("2009-09-25T10:10:00.000Z"), "referred-topics": {{ "sprint", "touch-screen" }}, "message-text": " like sprint its touch-screen is awesome" }
+{ "tweetid": 37i64, "user": { "screen-name": "AggieBollinger@675", "lang": "en", "friends_count": 45, "statuses_count": 175, "name": "Aggie Bollinger", "followers_count": 67 }, "sender-location": point("42.6,68.28"), "send-time": datetime("2012-02-22T10:10:00.000Z"), "referred-topics": {{ "sprint", "voice-clarity" }}, "message-text": " love sprint its voice-clarity is awesome" }
+{ "tweetid": 38i64, "user": { "screen-name": "JocelynPatton$328", "lang": "en", "friends_count": 35, "statuses_count": 484, "name": "Jocelyn Patton", "followers_count": 174 }, "sender-location": point("28.77,88.28"), "send-time": datetime("2006-12-09T10:10:00.000Z"), "referred-topics": {{ "at&t", "wireless" }}, "message-text": " hate at&t the wireless is horrible:(" }
+{ "tweetid": 39i64, "user": { "screen-name": "CandelariaHujsak#602", "lang": "en", "friends_count": 28, "statuses_count": 499, "name": "Candelaria Hujsak", "followers_count": 94 }, "sender-location": point("36.09,96.94"), "send-time": datetime("2007-11-23T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "wireless" }}, "message-text": " can't stand t-mobile the wireless is terrible:(" }
+{ "tweetid": 40i64, "user": { "screen-name": "DamarisMueller#283", "lang": "en", "friends_count": 46, "statuses_count": 122, "name": "Damaris Mueller", "followers_count": 189 }, "sender-location": point("44.31,73.93"), "send-time": datetime("2012-02-28T10:10:00.000Z"), "referred-topics": {{ "sprint", "wireless" }}, "message-text": " like sprint its wireless is awesome" }
+{ "tweetid": 41i64, "user": { "screen-name": "ChuckPhilbrick_884", "lang": "en", "friends_count": 73, "statuses_count": 237, "name": "Chuck Philbrick", "followers_count": 35 }, "sender-location": point("35.39,81.04"), "send-time": datetime("2012-05-07T10:10:00.000Z"), "referred-topics": {{ "verizon", "plan" }}, "message-text": " love verizon its plan is good:)" }
+{ "tweetid": 42i64, "user": { "screen-name": "BraxtonKifer_723", "lang": "en", "friends_count": 65, "statuses_count": 459, "name": "Braxton Kifer", "followers_count": 6 }, "sender-location": point("30.23,70.06"), "send-time": datetime("2007-10-15T10:10:00.000Z"), "referred-topics": {{ "verizon", "touch-screen" }}, "message-text": " dislike verizon the touch-screen is horrible" }
+{ "tweetid": 43i64, "user": { "screen-name": "DeshawnPorter#734", "lang": "en", "friends_count": 26, "statuses_count": 408, "name": "Deshawn Porter", "followers_count": 14 }, "sender-location": point("35.2,82.65"), "send-time": datetime("2005-10-06T10:10:00.000Z"), "referred-topics": {{ "sprint", "wireless" }}, "message-text": " love sprint its wireless is amazing" }
+{ "tweetid": 44i64, "user": { "screen-name": "SamanthaBeach$879", "lang": "en", "friends_count": 95, "statuses_count": 481, "name": "Samantha Beach", "followers_count": 119 }, "sender-location": point("30.28,89.79"), "send-time": datetime("2005-09-20T10:10:00.000Z"), "referred-topics": {{ "motorola", "network" }}, "message-text": " love motorola the network is mind-blowing:)" }
+{ "tweetid": 45i64, "user": { "screen-name": "NoelleBash_83", "lang": "en", "friends_count": 4, "statuses_count": 148, "name": "Noelle Bash", "followers_count": 139 }, "sender-location": point("42.4,96.94"), "send-time": datetime("2007-01-05T10:10:00.000Z"), "referred-topics": {{ "iphone", "platform" }}, "message-text": " hate iphone its platform is terrible:(" }
+{ "tweetid": 46i64, "user": { "screen-name": "RuthWells#712", "lang": "en", "friends_count": 51, "statuses_count": 415, "name": "Ruth Wells", "followers_count": 57 }, "sender-location": point("31.93,82.03"), "send-time": datetime("2007-04-21T10:10:00.000Z"), "referred-topics": {{ "iphone", "customization" }}, "message-text": " dislike iphone the customization is bad:(" }
+{ "tweetid": 47i64, "user": { "screen-name": "NakiaClose@771", "lang": "en", "friends_count": 59, "statuses_count": 239, "name": "Nakia Close", "followers_count": 105 }, "sender-location": point("47.06,92.54"), "send-time": datetime("2005-02-18T10:10:00.000Z"), "referred-topics": {{ "motorola", "3G" }}, "message-text": " can't stand motorola its 3G is OMG:(" }
+{ "tweetid": 48i64, "user": { "screen-name": "EmLinton#420", "lang": "en", "friends_count": 87, "statuses_count": 481, "name": "Em Linton", "followers_count": 141 }, "sender-location": point("35.6,88.2"), "send-time": datetime("2006-09-24T10:10:00.000Z"), "referred-topics": {{ "iphone", "customer-service" }}, "message-text": " hate iphone its customer-service is horrible" }
+{ "tweetid": 49i64, "user": { "screen-name": "DarbyPatton_703", "lang": "en", "friends_count": 40, "statuses_count": 79, "name": "Darby Patton", "followers_count": 159 }, "sender-location": point("36.57,84.01"), "send-time": datetime("2006-06-14T10:10:00.000Z"), "referred-topics": {{ "verizon", "platform" }}, "message-text": " love verizon its platform is good" }
+{ "tweetid": 50i64, "user": { "screen-name": "WilburStephenson$295", "lang": "en", "friends_count": 57, "statuses_count": 337, "name": "Wilbur Stephenson", "followers_count": 188 }, "sender-location": point("38.35,83.92"), "send-time": datetime("2006-10-14T10:10:00.000Z"), "referred-topics": {{ "motorola", "plan" }}, "message-text": " dislike motorola the plan is OMG:(" }
+{ "tweetid": 51i64, "user": { "screen-name": "PalmerHahn@368", "lang": "en", "friends_count": 13, "statuses_count": 196, "name": "Palmer Hahn", "followers_count": 69 }, "sender-location": point("48.96,88.74"), "send-time": datetime("2006-01-07T10:10:00.000Z"), "referred-topics": {{ "samsung", "shortcut-menu" }}, "message-text": " like samsung its shortcut-menu is awesome" }
+{ "tweetid": 52i64, "user": { "screen-name": "HarlanWynne_297", "lang": "en", "friends_count": 71, "statuses_count": 262, "name": "Harlan Wynne", "followers_count": 151 }, "sender-location": point("41.05,93.92"), "send-time": datetime("2008-07-08T10:10:00.000Z"), "referred-topics": {{ "samsung", "platform" }}, "message-text": " like samsung its platform is awesome" }
+{ "tweetid": 53i64, "user": { "screen-name": "GrettaCable#405", "lang": "en", "friends_count": 7, "statuses_count": 324, "name": "Gretta Cable", "followers_count": 82 }, "sender-location": point("40.6,71.86"), "send-time": datetime("2010-11-16T10:10:00.000Z"), "referred-topics": {{ "iphone", "network" }}, "message-text": " like iphone its network is amazing:)" }
+{ "tweetid": 54i64, "user": { "screen-name": "PhilipaRing_461", "lang": "en", "friends_count": 43, "statuses_count": 53, "name": "Philipa Ring", "followers_count": 164 }, "sender-location": point("30.47,90.14"), "send-time": datetime("2011-12-24T10:10:00.000Z"), "referred-topics": {{ "motorola", "voicemail-service" }}, "message-text": " like motorola its voicemail-service is amazing" }
+{ "tweetid": 55i64, "user": { "screen-name": "LindseyBurch_187", "lang": "en", "friends_count": 9, "statuses_count": 54, "name": "Lindsey Burch", "followers_count": 6 }, "sender-location": point("31.66,68.68"), "send-time": datetime("2011-12-21T10:10:00.000Z"), "referred-topics": {{ "samsung", "touch-screen" }}, "message-text": " can't stand samsung its touch-screen is terrible" }
+{ "tweetid": 56i64, "user": { "screen-name": "AnnabelLosey_61", "lang": "en", "friends_count": 53, "statuses_count": 381, "name": "Annabel Losey", "followers_count": 133 }, "sender-location": point("37.33,85.16"), "send-time": datetime("2005-11-14T10:10:00.000Z"), "referred-topics": {{ "sprint", "customization" }}, "message-text": " can't stand sprint the customization is horrible:(" }
+{ "tweetid": 57i64, "user": { "screen-name": "HectorLalty@132", "lang": "en", "friends_count": 2, "statuses_count": 195, "name": "Hector Lalty", "followers_count": 92 }, "sender-location": point("46.52,80.45"), "send-time": datetime("2012-04-15T10:10:00.000Z"), "referred-topics": {{ "iphone", "reachability" }}, "message-text": " hate iphone the reachability is bad:(" }
+{ "tweetid": 58i64, "user": { "screen-name": "KatieWilkins_817", "lang": "en", "friends_count": 95, "statuses_count": 476, "name": "Katie Wilkins", "followers_count": 151 }, "sender-location": point("44.72,69.13"), "send-time": datetime("2006-11-01T10:10:00.000Z"), "referred-topics": {{ "sprint", "voice-command" }}, "message-text": " like sprint the voice-command is amazing:)" }
+{ "tweetid": 59i64, "user": { "screen-name": "BrianneRamsey$451", "lang": "en", "friends_count": 13, "statuses_count": 69, "name": "Brianne Ramsey", "followers_count": 102 }, "sender-location": point("37.02,80.95"), "send-time": datetime("2007-02-08T10:10:00.000Z"), "referred-topics": {{ "verizon", "network" }}, "message-text": " dislike verizon the network is terrible" }
+{ "tweetid": 60i64, "user": { "screen-name": "RinaHujsak#7", "lang": "en", "friends_count": 69, "statuses_count": 73, "name": "Rina Hujsak", "followers_count": 63 }, "sender-location": point("28.27,73.68"), "send-time": datetime("2009-03-28T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "network" }}, "message-text": " like t-mobile its network is amazing:)" }
+{ "tweetid": 61i64, "user": { "screen-name": "GertieSadley$508", "lang": "en", "friends_count": 35, "statuses_count": 235, "name": "Gertie Sadley", "followers_count": 87 }, "sender-location": point("40.19,86.0"), "send-time": datetime("2006-07-27T10:10:00.000Z"), "referred-topics": {{ "at&t", "reachability" }}, "message-text": " love at&t its reachability is mind-blowing:)" }
+{ "tweetid": 62i64, "user": { "screen-name": "AaronJackson_273", "lang": "en", "friends_count": 98, "statuses_count": 205, "name": "Aaron Jackson", "followers_count": 128 }, "sender-location": point("48.11,85.01"), "send-time": datetime("2011-05-14T10:10:00.000Z"), "referred-topics": {{ "iphone", "voice-command" }}, "message-text": " like iphone the voice-command is awesome:)" }
+{ "tweetid": 63i64, "user": { "screen-name": "CreightonHujsak$142", "lang": "en", "friends_count": 21, "statuses_count": 68, "name": "Creighton Hujsak", "followers_count": 70 }, "sender-location": point("40.55,90.98"), "send-time": datetime("2010-08-15T10:10:00.000Z"), "referred-topics": {{ "samsung", "voicemail-service" }}, "message-text": " love samsung the voicemail-service is amazing" }
+{ "tweetid": 64i64, "user": { "screen-name": "KazukoWilkinson$204", "lang": "en", "friends_count": 51, "statuses_count": 147, "name": "Kazuko Wilkinson", "followers_count": 86 }, "sender-location": point("29.64,94.45"), "send-time": datetime("2008-08-24T10:10:00.000Z"), "referred-topics": {{ "motorola", "speed" }}, "message-text": " love motorola the speed is mind-blowing:)" }
+{ "tweetid": 65i64, "user": { "screen-name": "GonzaloDiegel#186", "lang": "en", "friends_count": 80, "statuses_count": 149, "name": "Gonzalo Diegel", "followers_count": 89 }, "sender-location": point("48.68,83.09"), "send-time": datetime("2008-04-24T10:10:00.000Z"), "referred-topics": {{ "at&t", "voicemail-service" }}, "message-text": " dislike at&t its voicemail-service is horrible:(" }
+{ "tweetid": 66i64, "user": { "screen-name": "KizzyKanaga$317", "lang": "en", "friends_count": 52, "statuses_count": 330, "name": "Kizzy Kanaga", "followers_count": 6 }, "sender-location": point("27.96,90.03"), "send-time": datetime("2009-10-19T10:10:00.000Z"), "referred-topics": {{ "at&t", "touch-screen" }}, "message-text": " like at&t the touch-screen is amazing" }
+{ "tweetid": 67i64, "user": { "screen-name": "CraigTreeby@171", "lang": "en", "friends_count": 72, "statuses_count": 44, "name": "Craig Treeby", "followers_count": 155 }, "sender-location": point("48.99,91.21"), "send-time": datetime("2006-02-14T10:10:00.000Z"), "referred-topics": {{ "samsung", "signal" }}, "message-text": " love samsung the signal is amazing:)" }
+{ "tweetid": 68i64, "user": { "screen-name": "BrionySaltser#395", "lang": "en", "friends_count": 21, "statuses_count": 422, "name": "Briony Saltser", "followers_count": 129 }, "sender-location": point("37.33,67.08"), "send-time": datetime("2006-03-07T10:10:00.000Z"), "referred-topics": {{ "samsung", "shortcut-menu" }}, "message-text": " love samsung its shortcut-menu is amazing:)" }
+{ "tweetid": 69i64, "user": { "screen-name": "MagdaleneWerner$925", "lang": "en", "friends_count": 46, "statuses_count": 446, "name": "Magdalene Werner", "followers_count": 75 }, "sender-location": point("45.77,83.23"), "send-time": datetime("2005-06-09T10:10:00.000Z"), "referred-topics": {{ "iphone", "signal" }}, "message-text": " like iphone the signal is mind-blowing" }
+{ "tweetid": 70i64, "user": { "screen-name": "FlossieBaker$898", "lang": "en", "friends_count": 67, "statuses_count": 63, "name": "Flossie Baker", "followers_count": 50 }, "sender-location": point("44.37,89.4"), "send-time": datetime("2011-07-16T10:10:00.000Z"), "referred-topics": {{ "motorola", "network" }}, "message-text": " like motorola its network is good" }
+{ "tweetid": 71i64, "user": { "screen-name": "GradyGraff$247", "lang": "en", "friends_count": 21, "statuses_count": 58, "name": "Grady Graff", "followers_count": 45 }, "sender-location": point("24.81,67.13"), "send-time": datetime("2012-04-09T10:10:00.000Z"), "referred-topics": {{ "motorola", "reachability" }}, "message-text": " like motorola the reachability is good" }
+{ "tweetid": 72i64, "user": { "screen-name": "MelitaLombardi@324", "lang": "en", "friends_count": 39, "statuses_count": 32, "name": "Melita Lombardi", "followers_count": 167 }, "sender-location": point("24.23,73.03"), "send-time": datetime("2011-02-26T10:10:00.000Z"), "referred-topics": {{ "verizon", "network" }}, "message-text": " hate verizon the network is terrible:(" }
+{ "tweetid": 73i64, "user": { "screen-name": "HerbertPowell_651", "lang": "en", "friends_count": 17, "statuses_count": 57, "name": "Herbert Powell", "followers_count": 167 }, "sender-location": point("47.22,92.69"), "send-time": datetime("2005-01-25T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "customization" }}, "message-text": " love t-mobile its customization is awesome:)" }
+{ "tweetid": 74i64, "user": { "screen-name": "BasilSanborn$23", "lang": "en", "friends_count": 38, "statuses_count": 391, "name": "Basil Sanborn", "followers_count": 108 }, "sender-location": point("30.96,68.0"), "send-time": datetime("2008-12-25T10:10:00.000Z"), "referred-topics": {{ "samsung", "network" }}, "message-text": " can't stand samsung the network is bad" }
+{ "tweetid": 75i64, "user": { "screen-name": "LaurineZoucks$307", "lang": "en", "friends_count": 27, "statuses_count": 161, "name": "Laurine Zoucks", "followers_count": 144 }, "sender-location": point("40.78,91.08"), "send-time": datetime("2009-11-02T10:10:00.000Z"), "referred-topics": {{ "motorola", "customer-service" }}, "message-text": " like motorola the customer-service is amazing" }
+{ "tweetid": 76i64, "user": { "screen-name": "LincolnMarriman@675", "lang": "en", "friends_count": 3, "statuses_count": 389, "name": "Lincoln Marriman", "followers_count": 125 }, "sender-location": point("28.4,83.82"), "send-time": datetime("2006-11-04T10:10:00.000Z"), "referred-topics": {{ "verizon", "customer-service" }}, "message-text": " like verizon the customer-service is mind-blowing" }
+{ "tweetid": 77i64, "user": { "screen-name": "FrancesFinlay#683", "lang": "en", "friends_count": 71, "statuses_count": 174, "name": "Frances Finlay", "followers_count": 32 }, "sender-location": point("29.71,66.36"), "send-time": datetime("2012-04-18T10:10:00.000Z"), "referred-topics": {{ "iphone", "customization" }}, "message-text": " love iphone the customization is awesome" }
+{ "tweetid": 78i64, "user": { "screen-name": "ModestoMarriman_627", "lang": "en", "friends_count": 76, "statuses_count": 2, "name": "Modesto Marriman", "followers_count": 33 }, "sender-location": point("33.77,92.15"), "send-time": datetime("2011-09-26T10:10:00.000Z"), "referred-topics": {{ "samsung", "network" }}, "message-text": " love samsung its network is mind-blowing" }
+{ "tweetid": 79i64, "user": { "screen-name": "FlossieCamp#59", "lang": "en", "friends_count": 17, "statuses_count": 484, "name": "Flossie Camp", "followers_count": 142 }, "sender-location": point("24.67,77.24"), "send-time": datetime("2005-07-03T10:10:00.000Z"), "referred-topics": {{ "iphone", "reachability" }}, "message-text": " like iphone its reachability is awesome:)" }
+{ "tweetid": 80i64, "user": { "screen-name": "DouglasKing@553", "lang": "en", "friends_count": 62, "statuses_count": 251, "name": "Douglas King", "followers_count": 180 }, "sender-location": point("24.84,74.15"), "send-time": datetime("2009-10-09T10:10:00.000Z"), "referred-topics": {{ "sprint", "speed" }}, "message-text": " can't stand sprint the speed is bad:(" }
+{ "tweetid": 81i64, "user": { "screen-name": "WardCasteel@972", "lang": "en", "friends_count": 8, "statuses_count": 358, "name": "Ward Casteel", "followers_count": 51 }, "sender-location": point("41.41,91.32"), "send-time": datetime("2007-05-08T10:10:00.000Z"), "referred-topics": {{ "at&t", "voice-command" }}, "message-text": " can't stand at&t the voice-command is terrible:(" }
+{ "tweetid": 82i64, "user": { "screen-name": "AdelaErskine#579", "lang": "en", "friends_count": 97, "statuses_count": 354, "name": "Adela Erskine", "followers_count": 155 }, "sender-location": point("35.56,68.19"), "send-time": datetime("2009-03-23T10:10:00.000Z"), "referred-topics": {{ "samsung", "touch-screen" }}, "message-text": " hate samsung the touch-screen is bad:(" }
+{ "tweetid": 83i64, "user": { "screen-name": "ClevelandPrevatt#255", "lang": "en", "friends_count": 24, "statuses_count": 159, "name": "Cleveland Prevatt", "followers_count": 68 }, "sender-location": point("38.6,67.51"), "send-time": datetime("2006-10-09T10:10:00.000Z"), "referred-topics": {{ "sprint", "platform" }}, "message-text": " hate sprint its platform is OMG:(" }
+{ "tweetid": 84i64, "user": { "screen-name": "MaxwellTreeby@610", "lang": "en", "friends_count": 21, "statuses_count": 168, "name": "Maxwell Treeby", "followers_count": 138 }, "sender-location": point("38.37,79.64"), "send-time": datetime("2007-07-17T10:10:00.000Z"), "referred-topics": {{ "motorola", "speed" }}, "message-text": " like motorola its speed is mind-blowing" }
+{ "tweetid": 85i64, "user": { "screen-name": "BobbyBastion$235", "lang": "en", "friends_count": 48, "statuses_count": 251, "name": "Bobby Bastion", "followers_count": 123 }, "sender-location": point("45.84,83.03"), "send-time": datetime("2009-03-14T10:10:00.000Z"), "referred-topics": {{ "samsung", "voice-command" }}, "message-text": " love samsung its voice-command is amazing" }
+{ "tweetid": 86i64, "user": { "screen-name": "ClairKanaga$512", "lang": "en", "friends_count": 88, "statuses_count": 274, "name": "Clair Kanaga", "followers_count": 77 }, "sender-location": point("46.34,84.86"), "send-time": datetime("2006-07-15T10:10:00.000Z"), "referred-topics": {{ "samsung", "reachability" }}, "message-text": " love samsung its reachability is mind-blowing:)" }
+{ "tweetid": 87i64, "user": { "screen-name": "HueyLosey_966", "lang": "en", "friends_count": 78, "statuses_count": 32, "name": "Huey Losey", "followers_count": 2 }, "sender-location": point("25.61,78.89"), "send-time": datetime("2011-03-22T10:10:00.000Z"), "referred-topics": {{ "samsung", "reachability" }}, "message-text": " like samsung its reachability is good:)" }
+{ "tweetid": 88i64, "user": { "screen-name": "SooThigpen#463", "lang": "en", "friends_count": 5, "statuses_count": 429, "name": "Soo Thigpen", "followers_count": 18 }, "sender-location": point("34.84,74.43"), "send-time": datetime("2009-03-09T10:10:00.000Z"), "referred-topics": {{ "motorola", "wireless" }}, "message-text": " love motorola the wireless is good:)" }
+{ "tweetid": 89i64, "user": { "screen-name": "LacreshaWire_320", "lang": "en", "friends_count": 92, "statuses_count": 127, "name": "Lacresha Wire", "followers_count": 194 }, "sender-location": point("47.73,86.79"), "send-time": datetime("2007-08-04T10:10:00.000Z"), "referred-topics": {{ "verizon", "wireless" }}, "message-text": " can't stand verizon its wireless is OMG:(" }
+{ "tweetid": 90i64, "user": { "screen-name": "MyriamLambert@966", "lang": "en", "friends_count": 22, "statuses_count": 452, "name": "Myriam Lambert", "followers_count": 193 }, "sender-location": point("41.85,88.44"), "send-time": datetime("2008-12-02T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "plan" }}, "message-text": " hate t-mobile the plan is bad" }
+{ "tweetid": 91i64, "user": { "screen-name": "WoodyWhite@341", "lang": "en", "friends_count": 12, "statuses_count": 183, "name": "Woody White", "followers_count": 31 }, "sender-location": point("29.04,85.35"), "send-time": datetime("2006-02-06T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "network" }}, "message-text": " like t-mobile its network is good" }
+{ "tweetid": 92i64, "user": { "screen-name": "QuinDickinson#157", "lang": "en", "friends_count": 84, "statuses_count": 415, "name": "Quin Dickinson", "followers_count": 9 }, "sender-location": point("40.86,67.52"), "send-time": datetime("2006-01-26T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "signal" }}, "message-text": " can't stand t-mobile the signal is horrible:(" }
+{ "tweetid": 93i64, "user": { "screen-name": "BettieRing@713", "lang": "en", "friends_count": 39, "statuses_count": 373, "name": "Bettie Ring", "followers_count": 98 }, "sender-location": point("26.37,69.03"), "send-time": datetime("2005-10-04T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "reachability" }}, "message-text": " dislike t-mobile the reachability is terrible:(" }
+{ "tweetid": 94i64, "user": { "screen-name": "LinaDraudy_733", "lang": "en", "friends_count": 70, "statuses_count": 228, "name": "Lina Draudy", "followers_count": 9 }, "sender-location": point("39.58,97.38"), "send-time": datetime("2012-03-13T10:10:00.000Z"), "referred-topics": {{ "verizon", "network" }}, "message-text": " like verizon the network is awesome:)" }
+{ "tweetid": 95i64, "user": { "screen-name": "StacyFleming#907", "lang": "en", "friends_count": 37, "statuses_count": 119, "name": "Stacy Fleming", "followers_count": 113 }, "sender-location": point("24.27,94.53"), "send-time": datetime("2007-10-08T10:10:00.000Z"), "referred-topics": {{ "samsung", "platform" }}, "message-text": " love samsung its platform is amazing:)" }
+{ "tweetid": 96i64, "user": { "screen-name": "AmbroseAllshouse_786", "lang": "en", "friends_count": 24, "statuses_count": 299, "name": "Ambrose Allshouse", "followers_count": 23 }, "sender-location": point("34.88,73.05"), "send-time": datetime("2009-01-09T10:10:00.000Z"), "referred-topics": {{ "verizon", "speed" }}, "message-text": " hate verizon the speed is horrible:(" }
+{ "tweetid": 97i64, "user": { "screen-name": "VaughnFocell_20", "lang": "en", "friends_count": 68, "statuses_count": 388, "name": "Vaughn Focell", "followers_count": 171 }, "sender-location": point("34.67,73.46"), "send-time": datetime("2012-01-24T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "customer-service" }}, "message-text": " can't stand t-mobile its customer-service is terrible" }
+{ "tweetid": 98i64, "user": { "screen-name": "UlyssesCrissman#115", "lang": "en", "friends_count": 90, "statuses_count": 250, "name": "Ulysses Crissman", "followers_count": 110 }, "sender-location": point("24.81,93.59"), "send-time": datetime("2008-04-02T10:10:00.000Z"), "referred-topics": {{ "motorola", "customer-service" }}, "message-text": " love motorola its customer-service is awesome" }
+{ "tweetid": 99i64, "user": { "screen-name": "WatCrissman#703", "lang": "en", "friends_count": 50, "statuses_count": 244, "name": "Wat Crissman", "followers_count": 123 }, "sender-location": point("33.22,92.64"), "send-time": datetime("2006-09-15T10:10:00.000Z"), "referred-topics": {{ "motorola", "plan" }}, "message-text": " can't stand motorola the plan is terrible" }
+{ "tweetid": 100i64, "user": { "screen-name": "BambiLaurence$910", "lang": "en", "friends_count": 57, "statuses_count": 311, "name": "Bambi Laurence", "followers_count": 136 }, "sender-location": point("36.88,80.08"), "send-time": datetime("2008-04-26T10:10:00.000Z"), "referred-topics": {{ "sprint", "speed" }}, "message-text": " love sprint its speed is mind-blowing" }
diff --git a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/FeedLifecycleListener.java b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/FeedLifecycleListener.java
index f0e51b2..fea44a2 100644
--- a/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/FeedLifecycleListener.java
+++ b/asterix-app/src/main/java/edu/uci/ics/asterix/hyracks/bootstrap/FeedLifecycleListener.java
@@ -32,6 +32,8 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
+import org.apache.commons.lang3.StringUtils;
+
 import edu.uci.ics.asterix.api.common.APIFramework.DisplayFormat;
 import edu.uci.ics.asterix.api.common.SessionConfig;
 import edu.uci.ics.asterix.aql.base.Statement;
@@ -41,6 +43,7 @@
 import edu.uci.ics.asterix.aql.expression.Identifier;
 import edu.uci.ics.asterix.aql.translator.AqlTranslator;
 import edu.uci.ics.asterix.common.exceptions.ACIDException;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
 import edu.uci.ics.asterix.common.feeds.SuperFeedManager;
 import edu.uci.ics.asterix.event.schema.cluster.Cluster;
@@ -353,80 +356,52 @@
                 Map<String, String> feedActivityDetails = new HashMap<String, String>();
                 StringBuilder ingestLocs = new StringBuilder();
                 for (OperatorDescriptorId ingestOpId : ingestOperatorIds) {
-                    feedInfo.ingestLocations.addAll(info.getOperatorLocations().get(ingestOpId));
+                    Map<Integer, String> operatorLocations = info.getOperatorLocations().get(ingestOpId);
+                    int nOperatorInstances = operatorLocations.size();
+                    for (int i = 0; i < nOperatorInstances; i++) {
+                        feedInfo.ingestLocations.add(operatorLocations.get(i));
+                    }
                 }
                 StringBuilder computeLocs = new StringBuilder();
                 for (OperatorDescriptorId computeOpId : computeOperatorIds) {
-                    List<String> locations = info.getOperatorLocations().get(computeOpId);
-                    if (locations != null) {
-                        feedInfo.computeLocations.addAll(locations);
+                    Map<Integer, String> operatorLocations = info.getOperatorLocations().get(computeOpId);
+                    if (operatorLocations != null) {
+                        int nOperatorInstances = operatorLocations.size();
+                        for (int i = 0; i < nOperatorInstances; i++) {
+                            feedInfo.computeLocations.add(operatorLocations.get(i));
+                        }
                     } else {
                         feedInfo.computeLocations.addAll(feedInfo.ingestLocations);
                     }
                 }
+
                 StringBuilder storageLocs = new StringBuilder();
                 for (OperatorDescriptorId storageOpId : storageOperatorIds) {
-                    feedInfo.storageLocations.addAll(info.getOperatorLocations().get(storageOpId));
+                    Map<Integer, String> operatorLocations = info.getOperatorLocations().get(storageOpId);
+                    int nOperatorInstances = operatorLocations.size();
+                    for (int i = 0; i < nOperatorInstances; i++) {
+                        feedInfo.storageLocations.add(operatorLocations.get(i));
+                    }
                 }
 
-                for (String ingestLoc : feedInfo.ingestLocations) {
-                    ingestLocs.append(ingestLoc);
-                    ingestLocs.append(",");
-                }
-                if (ingestLocs.length() > 1) {
-                    ingestLocs.deleteCharAt(ingestLocs.length() - 1);
-                }
-                for (String computeLoc : feedInfo.computeLocations) {
-                    computeLocs.append(computeLoc);
-                    computeLocs.append(",");
-                }
-                if (computeLocs.length() > 1) {
-                    computeLocs.deleteCharAt(computeLocs.length() - 1);
-                }
-                for (String storageLoc : feedInfo.storageLocations) {
-                    storageLocs.append(storageLoc);
-                    storageLocs.append(",");
-                }
-                if (storageLocs.length() > 1) {
-                    storageLocs.deleteCharAt(storageLocs.length() - 1);
-                }
+                ingestLocs.append(StringUtils.join(feedInfo.ingestLocations, ","));
+                computeLocs.append(StringUtils.join(feedInfo.computeLocations, ","));
+                storageLocs.append(StringUtils.join(feedInfo.storageLocations, ","));
 
                 feedActivityDetails.put(FeedActivity.FeedActivityDetails.INGEST_LOCATIONS, ingestLocs.toString());
                 feedActivityDetails.put(FeedActivity.FeedActivityDetails.COMPUTE_LOCATIONS, computeLocs.toString());
                 feedActivityDetails.put(FeedActivity.FeedActivityDetails.STORAGE_LOCATIONS, storageLocs.toString());
-                feedActivityDetails.put(FeedActivity.FeedActivityDetails.FEED_POLICY_NAME,
-                        feedInfo.feedPolicy.get(BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY));
+                String policyName = feedInfo.feedPolicy.get(BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY);
+                feedActivityDetails.put(FeedActivity.FeedActivityDetails.FEED_POLICY_NAME, policyName);
 
-                int superFeedManagerIndex = new Random().nextInt(feedInfo.ingestLocations.size());
-                String superFeedManagerHost = feedInfo.ingestLocations.get(superFeedManagerIndex);
-
-                Cluster cluster = AsterixClusterProperties.INSTANCE.getCluster();
-                String instanceName = cluster.getInstanceName();
-                String node = superFeedManagerHost.substring(instanceName.length() + 1);
-                String hostIp = null;
-                for (Node n : cluster.getNode()) {
-                    if (n.getId().equals(node)) {
-                        hostIp = n.getClusterIp();
-                        break;
+                FeedPolicyAccessor policyAccessor = new FeedPolicyAccessor(feedInfo.feedPolicy);
+                if (policyAccessor.collectStatistics() || policyAccessor.isElastic()) {
+                    if (LOGGER.isLoggable(Level.INFO)) {
+                        LOGGER.info("Feed " + feedInfo.feedConnectionId + " requires Super Feed Manager");
                     }
-                }
-                if (hostIp == null) {
-                    throw new IllegalStateException("Unknown node " + superFeedManagerHost);
+                    configureSuperFeedManager(feedInfo, feedActivityDetails);
                 }
 
-                feedActivityDetails.put(FeedActivity.FeedActivityDetails.SUPER_FEED_MANAGER_HOST, hostIp);
-                feedActivityDetails.put(FeedActivity.FeedActivityDetails.SUPER_FEED_MANAGER_PORT, ""
-                        + superFeedManagerPort);
-
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Super Feed Manager for " + feedInfo.feedConnectionId + " is " + hostIp + " node "
-                            + superFeedManagerHost);
-                }
-
-                FeedManagerElectMessage feedMessage = new FeedManagerElectMessage(hostIp, superFeedManagerHost,
-                        superFeedManagerPort, feedInfo.feedConnectionId);
-                superFeedManagerPort += SuperFeedManager.PORT_RANGE_ASSIGNED;
-                messengerOutbox.add(new FeedMessengerMessage(feedMessage, feedInfo));
                 MetadataManager.INSTANCE.acquireWriteLatch();
                 MetadataTransactionContext mdTxnCtx = null;
                 try {
@@ -450,6 +425,41 @@
 
         }
 
+        private void configureSuperFeedManager(FeedInfo feedInfo, Map<String, String> feedActivityDetails) {
+            // Elect a super feed manager node from the ingest locations and record its host/port.
+            int superFeedManagerIndex = new Random().nextInt(feedInfo.ingestLocations.size());
+            String superFeedManagerHost = feedInfo.ingestLocations.get(superFeedManagerIndex);
+
+            Cluster cluster = AsterixClusterProperties.INSTANCE.getCluster();
+            String instanceName = cluster.getInstanceName();
+            String node = superFeedManagerHost.substring(instanceName.length() + 1);
+            String hostIp = null;
+            for (Node n : cluster.getNode()) {
+                if (n.getId().equals(node)) {
+                    hostIp = n.getClusterIp();
+                    break;
+                }
+            }
+            if (hostIp == null) {
+                throw new IllegalStateException("Unknown node " + superFeedManagerHost);
+            }
+
+            feedActivityDetails.put(FeedActivity.FeedActivityDetails.SUPER_FEED_MANAGER_HOST, hostIp);
+            feedActivityDetails
+                    .put(FeedActivity.FeedActivityDetails.SUPER_FEED_MANAGER_PORT, "" + superFeedManagerPort);
+
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Super Feed Manager for " + feedInfo.feedConnectionId + " is " + hostIp + " node "
+                        + superFeedManagerHost);
+            }
+
+            FeedManagerElectMessage feedMessage = new FeedManagerElectMessage(hostIp, superFeedManagerHost,
+                    superFeedManagerPort, feedInfo.feedConnectionId);
+            superFeedManagerPort += SuperFeedManager.PORT_RANGE_ASSIGNED;
+            messengerOutbox.add(new FeedMessengerMessage(feedMessage, feedInfo));
+
+        }
+
         private void handleJobFinishMessage(FeedInfo feedInfo, Message message) {
             MetadataManager.INSTANCE.acquireWriteLatch();
             MetadataTransactionContext mdTxnCtx = null;
@@ -458,15 +468,12 @@
                 try {
                     IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
                     JobInfo info = hcc.getJobInfo(message.jobId);
-                    JobStatus status = info.getPendingStatus();
-                    List<Exception> exceptions;
+                    JobStatus status = info.getStatus();
                     boolean failure = status != null && status.equals(JobStatus.FAILURE);
                     FeedActivityType activityType = FeedActivityType.FEED_END;
                     Map<String, String> details = new HashMap<String, String>();
                     if (failure) {
-                        exceptions = info.getPendingExceptions();
                         activityType = FeedActivityType.FEED_FAILURE;
-                        details.put(FeedActivity.FeedActivityDetails.EXCEPTION_MESSAGE, exceptions.get(0).getMessage());
                     }
                     mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
                     FeedActivity feedActivity = new FeedActivity(feedInfo.feedConnectionId.getDataverse(),
@@ -483,7 +490,10 @@
                         throw new IllegalStateException(" Unable to abort ");
                     }
                 } catch (Exception e) {
-                    // add exception handling here
+                    if (LOGGER.isLoggable(Level.WARNING)) {
+                        LOGGER.warning("Exception in handling job finish message " + message.jobId + "["
+                                + message.messageKind + "]");
+                    }
                 } finally {
                     MetadataManager.INSTANCE.releaseWriteLatch();
                 }
diff --git a/asterix-app/src/test/resources/metadata/queries/basic/meta15/meta15.3.query.aql b/asterix-app/src/test/resources/metadata/queries/basic/meta15/meta15.3.query.aql
index dc11189..dc662b5 100644
--- a/asterix-app/src/test/resources/metadata/queries/basic/meta15/meta15.3.query.aql
+++ b/asterix-app/src/test/resources/metadata/queries/basic/meta15/meta15.3.query.aql
@@ -5,5 +5,5 @@
  */
 
 for $l in dataset('Metadata.DatasourceAdapter')
+order by $l.AdapterName
 return $l
-
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta15.adm b/asterix-app/src/test/resources/metadata/results/basic/meta15.adm
index 43cf9ec..451fa99 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/meta15.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta15.adm
@@ -1,10 +1,10 @@
 { "DataverseName": "Metadata", "Name": "cnn_feed", "Classname": "edu.uci.ics.asterix.external.adapter.factory.CNNFeedAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
 { "DataverseName": "Metadata", "Name": "file_feed", "Classname": "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
-{ "DataverseName": "Metadata", "Name": "generic_socket_feed", "Classname": "edu.uci.ics.asterix.tools.external.data.GenericSocketFeedAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
 { "DataverseName": "Metadata", "Name": "hdfs", "Classname": "edu.uci.ics.asterix.external.adapter.factory.HDFSAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
 { "DataverseName": "Metadata", "Name": "hive", "Classname": "edu.uci.ics.asterix.external.adapter.factory.HiveAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
 { "DataverseName": "Metadata", "Name": "localfs", "Classname": "edu.uci.ics.asterix.external.adapter.factory.NCFileSystemAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
 { "DataverseName": "Metadata", "Name": "pull_twitter", "Classname": "edu.uci.ics.asterix.external.adapter.factory.PullBasedTwitterAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
 { "DataverseName": "Metadata", "Name": "rss_feed", "Classname": "edu.uci.ics.asterix.external.adapter.factory.RSSFeedAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
-{ "DataverseName": "Metadata", "Name": "synthetic_twitter_feed", "Classname": "edu.uci.ics.asterix.tools.external.data.SyntheticTwitterFeedAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
+{ "DataverseName": "Metadata", "Name": "socket_adaptor", "Classname": "edu.uci.ics.asterix.tools.external.data.GenericSocketFeedAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
+{ "DataverseName": "Metadata", "Name": "socket_client", "Classname": "edu.uci.ics.asterix.tools.external.data.SocketClientAdapterFactory", "Type": "INTERNAL", "Timestamp": "Wed Nov 20 14:45:58 IST 2013" }
 { "DataverseName": "Metadata", "Name": "twitter_firehose", "Classname": "edu.uci.ics.asterix.tools.external.data.TwitterFirehoseFeedAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta15/meta15.1.adm b/asterix-app/src/test/resources/metadata/results/basic/meta15/meta15.1.adm
index f799a2a..66733d3 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/meta15/meta15.1.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta15/meta15.1.adm
@@ -1,10 +1,11 @@
 { "DataverseName": "Metadata", "Name": "azure_twitter", "Classname": "edu.uci.ics.asterix.external.adapter.factory.PullBasedAzureTwitterAdapterFactory", "Type": "INTERNAL", "Timestamp": "Thu Oct 24 01:39:27 PDT 2013" }
 { "DataverseName": "Metadata", "Name": "cnn_feed", "Classname": "edu.uci.ics.asterix.external.adapter.factory.CNNFeedAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
 { "DataverseName": "Metadata", "Name": "file_feed", "Classname": "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
-{ "DataverseName": "Metadata", "Name": "generic_socket_feed", "Classname": "edu.uci.ics.asterix.tools.external.data.GenericSocketFeedAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
 { "DataverseName": "Metadata", "Name": "hdfs", "Classname": "edu.uci.ics.asterix.external.adapter.factory.HDFSAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
 { "DataverseName": "Metadata", "Name": "hive", "Classname": "edu.uci.ics.asterix.external.adapter.factory.HiveAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
 { "DataverseName": "Metadata", "Name": "localfs", "Classname": "edu.uci.ics.asterix.external.adapter.factory.NCFileSystemAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
 { "DataverseName": "Metadata", "Name": "pull_twitter", "Classname": "edu.uci.ics.asterix.external.adapter.factory.PullBasedTwitterAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
 { "DataverseName": "Metadata", "Name": "rss_feed", "Classname": "edu.uci.ics.asterix.external.adapter.factory.RSSFeedAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
+{ "DataverseName": "Metadata", "Name": "socket_adaptor", "Classname": "edu.uci.ics.asterix.tools.external.data.GenericSocketFeedAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
+{ "DataverseName": "Metadata", "Name": "socket_client", "Classname": "edu.uci.ics.asterix.tools.external.data.SocketClientAdapterFactory", "Type": "INTERNAL", "Timestamp": "Wed Nov 20 14:45:58 IST 2013" }
 { "DataverseName": "Metadata", "Name": "twitter_firehose", "Classname": "edu.uci.ics.asterix.tools.external.data.TwitterFirehoseFeedAdapterFactory", "Type": "INTERNAL", "Timestamp": "Tue Jul 16 22:38:45 PDT 2013" }
diff --git a/asterix-app/src/test/resources/metadata/results/basic/meta17.adm b/asterix-app/src/test/resources/metadata/results/basic/meta17.adm
index e438cb7..8f50f49 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/meta17.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/meta17.adm
@@ -64,4 +64,4 @@
 { "DataverseName": "Metadata", "DatatypeName": "string", "Derived": null, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
 { "DataverseName": "Metadata", "DatatypeName": "time", "Derived": null, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
 { "DataverseName": "Metadata", "DatatypeName": "uuid", "Derived": null, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "year-month-duration", "Derived": null, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "year-month-duration", "Derived": null, "Timestamp": "Mon Aug 26 13:17:12 PDT 2013" }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/metadata/results/basic/metadata_datatype.adm b/asterix-app/src/test/resources/metadata/results/basic/metadata_datatype.adm
index 9296ff1..6dc3613 100644
--- a/asterix-app/src/test/resources/metadata/results/basic/metadata_datatype.adm
+++ b/asterix-app/src/test/resources/metadata/results/basic/metadata_datatype.adm
@@ -64,4 +64,4 @@
 { "DataverseName": "Metadata", "DatatypeName": "string", "Derived": null, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
 { "DataverseName": "Metadata", "DatatypeName": "time", "Derived": null, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
 { "DataverseName": "Metadata", "DatatypeName": "uuid", "Derived": null, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
-{ "DataverseName": "Metadata", "DatatypeName": "year-month-duration", "Derived": null, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
+{ "DataverseName": "Metadata", "DatatypeName": "year-month-duration", "Derived": null, "Timestamp": "Mon Aug 26 13:21:59 PDT 2013" }
\ No newline at end of file
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02/feeds_02.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02/feeds_02.1.ddl.aql
index fe5b88f..a2e2f7d 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02/feeds_02.1.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_02/feeds_02.1.ddl.aql
@@ -23,4 +23,4 @@
 
 create feed  TweetFeed
 using file_feed
-(("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("output-type-name"="TweetType"),("tuple-interval"="10"));
+(("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("type-name"="TweetType"),("tuple-interval"="10"));
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04/feeds_04.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04/feeds_04.1.ddl.aql
index 6f539b0..8333f7c 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04/feeds_04.1.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_04/feeds_04.1.ddl.aql
@@ -22,4 +22,4 @@
 
 create feed TweetFeed
 using file_feed
-(("fs"="hdfs"),("hdfs"="hdfs://127.0.0.1:31888"),("path"="/asterix/obamatweets.adm"),("format"="adm"),("input-format"="text-input-format"),("output-type-name"="TweetType"),("tuple-interval"="10"));
+(("fs"="hdfs"),("hdfs"="hdfs://127.0.0.1:31888"),("path"="/asterix/obamatweets.adm"),("format"="adm"),("input-format"="text-input-format"),("type-name"="TweetType"),("tuple-interval"="10"));
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.1.ddl.aql
new file mode 100644
index 0000000..09bea2a
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.1.ddl.aql
@@ -0,0 +1,47 @@
+/*
+ * Description  : Create a feed dataset and a feed that uses the generic socket feed adaptor.
+                  To drive the socket-adaptor-based feed, we define another feed that uses the
+                  client socket test adaptor. The test adaptor reads content from a file and
+                  forwards it to the socket feed adaptor listening on a known socket.
+                  At the end, we verify that the data pushed through the socket feed adaptor
+                  has been collected in the target dataset.
+
+ * Expected Res : Success
+ * Date         : 20th Nov 2013
+ */
+
+drop dataverse feeds if exists;
+create dataverse feeds;
+use dataverse feeds;
+
+create type TwitterUserType as closed {
+screen-name: string,
+lang: string,
+friends_count: int32,
+statuses_count: int32,
+name: string,
+followers_count: int32
+}
+
+create type TweetMessageType as closed {
+tweetid: int64,
+user: TwitterUserType,
+sender-location: point,
+send-time: datetime,
+referred-topics: {{ string }},
+message-text: string
+}
+
+create dataset MyTweets(TweetMessageType)
+primary key tweetid;
+
+create dataset DummyTweets(TweetMessageType)
+primary key tweetid;
+
+create feed socket_feed
+using socket_adaptor 
+(("sockets"="127.0.0.1:9009"),("addressType"="IP"),("type-name"="TweetMessageType"),("format"="adm"));
+
+create feed client_test_feed 
+using socket_client
+(("sockets"="127.0.0.1:9009"),("addressType"="IP"),("format"="adm"),("file_splits"="data/twitter/tw_messages_100.adm"));
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.2.update.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.2.update.aql
new file mode 100644
index 0000000..40cbd75
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.2.update.aql
@@ -0,0 +1,17 @@
+/*
+ * Description  : Create a feed dataset and a feed that uses the generic socket feed adaptor.
+                  To drive the socket-adaptor-based feed, we define another feed that uses the
+                  client socket test adaptor. The test adaptor reads content from a file and
+                  forwards it to the socket feed adaptor listening on a known socket.
+                  At the end, we verify that the data pushed through the socket feed adaptor
+                  has been collected in the target dataset.
+
+ * Expected Res : Success
+ * Date         : 20th Nov 2013
+*/
+  
+use dataverse feeds;
+
+set wait-for-completion-feed "false";
+
+connect feed socket_feed to dataset MyTweets;
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.3.sleep.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.3.sleep.aql
new file mode 100644
index 0000000..8bd1af1
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.3.sleep.aql
@@ -0,0 +1 @@
+2000
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.4.update.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.4.update.aql
new file mode 100644
index 0000000..13b9618
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.4.update.aql
@@ -0,0 +1,17 @@
+/*
+ * Description  : Create a feed dataset and a feed that uses the generic socket feed adaptor.
+                  To drive the socket-adaptor-based feed, we define another feed that uses the
+                  client socket test adaptor. The test adaptor reads content from a file and
+                  forwards it to the socket feed adaptor listening on a known socket.
+                  At the end, we verify that the data pushed through the socket feed adaptor
+                  has been collected in the target dataset.
+
+ * Expected Res : Success
+ * Date         : 20th Nov 2013
+*/
+  
+use dataverse feeds;
+
+set wait-for-completion-feed "true";
+
+connect feed client_test_feed to dataset DummyTweets;
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.5.query.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.5.query.aql
new file mode 100644
index 0000000..70cf71c
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.5.query.aql
@@ -0,0 +1,17 @@
+/*
+ * Description  : Create a feed dataset and a feed that uses the generic socket feed adaptor.
+                  To drive the socket-adaptor-based feed, we define another feed that uses the
+                  client socket test adaptor. The test adaptor reads content from a file and
+                  forwards it to the socket feed adaptor listening on a known socket.
+                  At the end, we verify that the data pushed through the socket feed adaptor
+                  has been collected in the target dataset.
+
+ * Expected Res : Success
+ * Date         : 20th Nov 2013
+*/
+
+use dataverse feeds;
+
+for $x in dataset MyTweets
+order by $x.tweetid
+return $x
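
The five feeds_06 files above set up the socket feed, connect the driving client feed, pause, and finally scan the dataset ordered by tweetid, so the output is deterministic and matches the 100-record results file added further below. As a minimal sketch (not part of the committed test; the dataverse and dataset names are taken from the DDL above), the same success condition could also be checked with a simple count:

    use dataverse feeds;

    /* illustrative only: tw_messages_100.adm contains 100 records */
    count(for $x in dataset MyTweets return $x)
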
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds/issue_230_feeds.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds/issue_230_feeds.1.ddl.aql
index 805a41c..d5a1c92 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds/issue_230_feeds.1.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds/issue_230_feeds.1.ddl.aql
@@ -22,6 +22,6 @@
 
 create feed TweetFeed
 using file_feed
-(("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("output-type-name"="TweetType"),("tuple-interval"="10"));
+(("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("type-name"="TweetType"),("tuple-interval"="10"));
 
 
diff --git a/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7/issue_251_dataset_hint_7.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7/issue_251_dataset_hint_7.1.ddl.aql
index 92f716d..3f000ed 100644
--- a/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7/issue_251_dataset_hint_7.1.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/hints/issue_251_dataset_hint_7/issue_251_dataset_hint_7.1.ddl.aql
@@ -23,4 +23,4 @@
 
 create feed TweetFeed
 using file_feed
-(("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("output-type-name"="TweetType"),("tuple-interval"="10"));
+(("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("type-name"="TweetType"),("tuple-interval"="10"));
diff --git a/asterix-app/src/test/resources/runtimets/results/feeds/feeds_06/feeds_06.1.adm b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_06/feeds_06.1.adm
new file mode 100644
index 0000000..f60be3e
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_06/feeds_06.1.adm
@@ -0,0 +1,100 @@
+{ "tweetid": 1i64, "user": { "screen-name": "EdwardLeslie#333", "lang": "en", "friends_count": 31, "statuses_count": 107, "name": "Edward Leslie", "followers_count": 80 }, "sender-location": point("29.37,78.8"), "send-time": datetime("2005-10-14T10:10:00.000Z"), "referred-topics": {{ "at&t", "network" }}, "message-text": " can't stand at&t the network is terrible:(" }
+{ "tweetid": 2i64, "user": { "screen-name": "PenniBauerle$865", "lang": "en", "friends_count": 32, "statuses_count": 308, "name": "Penni Bauerle", "followers_count": 97 }, "sender-location": point("37.99,83.51"), "send-time": datetime("2011-09-23T10:10:00.000Z"), "referred-topics": {{ "iphone", "plan" }}, "message-text": " love iphone its plan is awesome" }
+{ "tweetid": 3i64, "user": { "screen-name": "TrudiSaline$17", "lang": "en", "friends_count": 2, "statuses_count": 248, "name": "Trudi Saline", "followers_count": 154 }, "sender-location": point("48.17,93.4"), "send-time": datetime("2007-07-02T10:10:00.000Z"), "referred-topics": {{ "sprint", "3G" }}, "message-text": " like sprint its 3G is good:)" }
+{ "tweetid": 4i64, "user": { "screen-name": "EdytheMurray#502", "lang": "en", "friends_count": 23, "statuses_count": 142, "name": "Edythe Murray", "followers_count": 164 }, "sender-location": point("24.63,90.02"), "send-time": datetime("2008-03-16T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "voice-clarity" }}, "message-text": " like t-mobile the voice-clarity is good:)" }
+{ "tweetid": 5i64, "user": { "screen-name": "CoralMoon#517", "lang": "en", "friends_count": 35, "statuses_count": 3, "name": "Coral Moon", "followers_count": 67 }, "sender-location": point("32.05,75.79"), "send-time": datetime("2006-02-18T10:10:00.000Z"), "referred-topics": {{ "samsung", "touch-screen" }}, "message-text": " love samsung the touch-screen is mind-blowing" }
+{ "tweetid": 6i64, "user": { "screen-name": "CarriePinney#881", "lang": "en", "friends_count": 77, "statuses_count": 113, "name": "Carrie Pinney", "followers_count": 120 }, "sender-location": point("45.72,93.27"), "send-time": datetime("2011-12-02T10:10:00.000Z"), "referred-topics": {{ "sprint", "speed" }}, "message-text": " love sprint its speed is awesome:)" }
+{ "tweetid": 7i64, "user": { "screen-name": "AmadoTomey_367", "lang": "en", "friends_count": 28, "statuses_count": 379, "name": "Amado Tomey", "followers_count": 119 }, "sender-location": point("43.0,96.53"), "send-time": datetime("2011-07-04T10:10:00.000Z"), "referred-topics": {{ "verizon", "platform" }}, "message-text": " hate verizon its platform is OMG:(" }
+{ "tweetid": 8i64, "user": { "screen-name": "OdellWallace#398", "lang": "en", "friends_count": 10, "statuses_count": 89, "name": "Odell Wallace", "followers_count": 4 }, "sender-location": point("28.61,90.69"), "send-time": datetime("2012-01-09T10:10:00.000Z"), "referred-topics": {{ "motorola", "signal" }}, "message-text": " love motorola its signal is amazing:)" }
+{ "tweetid": 9i64, "user": { "screen-name": "NickLing#80", "lang": "en", "friends_count": 99, "statuses_count": 291, "name": "Nick Ling", "followers_count": 144 }, "sender-location": point("33.59,71.74"), "send-time": datetime("2011-05-14T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "speed" }}, "message-text": " hate t-mobile the speed is horrible:(" }
+{ "tweetid": 10i64, "user": { "screen-name": "MickeyDunkle_962", "lang": "en", "friends_count": 46, "statuses_count": 429, "name": "Mickey Dunkle", "followers_count": 110 }, "sender-location": point("28.72,70.51"), "send-time": datetime("2006-05-02T10:10:00.000Z"), "referred-topics": {{ "at&t", "reachability" }}, "message-text": " can't stand at&t its reachability is OMG:(" }
+{ "tweetid": 11i64, "user": { "screen-name": "AlaynaKnopsnider$684", "lang": "en", "friends_count": 70, "statuses_count": 425, "name": "Alayna Knopsnider", "followers_count": 106 }, "sender-location": point("35.4,69.61"), "send-time": datetime("2012-08-15T10:10:00.000Z"), "referred-topics": {{ "sprint", "voice-command" }}, "message-text": " dislike sprint the voice-command is bad" }
+{ "tweetid": 12i64, "user": { "screen-name": "SeraphinaWall_37", "lang": "en", "friends_count": 34, "statuses_count": 43, "name": "Seraphina Wall", "followers_count": 101 }, "sender-location": point("27.83,95.15"), "send-time": datetime("2010-02-08T10:10:00.000Z"), "referred-topics": {{ "motorola", "signal" }}, "message-text": " like motorola its signal is amazing:)" }
+{ "tweetid": 13i64, "user": { "screen-name": "TonyaKnopsnider#342", "lang": "en", "friends_count": 96, "statuses_count": 479, "name": "Tonya Knopsnider", "followers_count": 105 }, "sender-location": point("27.95,74.39"), "send-time": datetime("2008-05-26T10:10:00.000Z"), "referred-topics": {{ "motorola", "voicemail-service" }}, "message-text": " dislike motorola its voicemail-service is bad" }
+{ "tweetid": 14i64, "user": { "screen-name": "SkylerStough#713", "lang": "en", "friends_count": 29, "statuses_count": 41, "name": "Skyler Stough", "followers_count": 118 }, "sender-location": point("39.72,68.97"), "send-time": datetime("2012-05-21T10:10:00.000Z"), "referred-topics": {{ "iphone", "3G" }}, "message-text": " love iphone its 3G is awesome:)" }
+{ "tweetid": 15i64, "user": { "screen-name": "IrisMillard$830", "lang": "en", "friends_count": 9, "statuses_count": 56, "name": "Iris Millard", "followers_count": 127 }, "sender-location": point("27.59,95.34"), "send-time": datetime("2010-02-07T10:10:00.000Z"), "referred-topics": {{ "sprint", "voice-clarity" }}, "message-text": " like sprint the voice-clarity is amazing" }
+{ "tweetid": 16i64, "user": { "screen-name": "KaylynBrinigh_817", "lang": "en", "friends_count": 11, "statuses_count": 448, "name": "Kaylyn Brinigh", "followers_count": 53 }, "sender-location": point("25.19,79.71"), "send-time": datetime("2005-04-06T10:10:00.000Z"), "referred-topics": {{ "samsung", "customization" }}, "message-text": " love samsung its customization is amazing:)" }
+{ "tweetid": 17i64, "user": { "screen-name": "SungHoopengarner#732", "lang": "en", "friends_count": 55, "statuses_count": 129, "name": "Sung Hoopengarner", "followers_count": 152 }, "sender-location": point("47.75,93.12"), "send-time": datetime("2010-01-04T10:10:00.000Z"), "referred-topics": {{ "motorola", "voice-command" }}, "message-text": " dislike motorola its voice-command is horrible:(" }
+{ "tweetid": 18i64, "user": { "screen-name": "RenatoRyals_261", "lang": "en", "friends_count": 46, "statuses_count": 439, "name": "Renato Ryals", "followers_count": 73 }, "sender-location": point("38.48,75.0"), "send-time": datetime("2010-04-14T10:10:00.000Z"), "referred-topics": {{ "sprint", "signal" }}, "message-text": " love sprint its signal is good:)" }
+{ "tweetid": 19i64, "user": { "screen-name": "JohnnieHanseu#755", "lang": "en", "friends_count": 84, "statuses_count": 281, "name": "Johnnie Hanseu", "followers_count": 70 }, "sender-location": point("42.75,70.91"), "send-time": datetime("2010-06-12T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "3G" }}, "message-text": " like t-mobile its 3G is mind-blowing:)" }
+{ "tweetid": 20i64, "user": { "screen-name": "LindseyRahl#362", "lang": "en", "friends_count": 27, "statuses_count": 458, "name": "Lindsey Rahl", "followers_count": 24 }, "sender-location": point("36.2,94.8"), "send-time": datetime("2007-01-02T10:10:00.000Z"), "referred-topics": {{ "at&t", "voice-command" }}, "message-text": " can't stand at&t the voice-command is horrible:(" }
+{ "tweetid": 21i64, "user": { "screen-name": "CearaLing$289", "lang": "en", "friends_count": 39, "statuses_count": 177, "name": "Ceara Ling", "followers_count": 40 }, "sender-location": point("39.58,71.28"), "send-time": datetime("2008-05-20T10:10:00.000Z"), "referred-topics": {{ "samsung", "reachability" }}, "message-text": " like samsung the reachability is amazing:)" }
+{ "tweetid": 22i64, "user": { "screen-name": "DomoniqueEisenmann_636", "lang": "en", "friends_count": 27, "statuses_count": 465, "name": "Domonique Eisenmann", "followers_count": 166 }, "sender-location": point("47.11,77.87"), "send-time": datetime("2008-10-24T10:10:00.000Z"), "referred-topics": {{ "sprint", "voice-command" }}, "message-text": " can't stand sprint its voice-command is horrible:(" }
+{ "tweetid": 23i64, "user": { "screen-name": "MelanieGadow$539", "lang": "en", "friends_count": 34, "statuses_count": 112, "name": "Melanie Gadow", "followers_count": 65 }, "sender-location": point("31.9,87.22"), "send-time": datetime("2012-07-26T10:10:00.000Z"), "referred-topics": {{ "sprint", "speed" }}, "message-text": " like sprint its speed is mind-blowing:)" }
+{ "tweetid": 24i64, "user": { "screen-name": "HewiePeters#654", "lang": "en", "friends_count": 8, "statuses_count": 309, "name": "Hewie Peters", "followers_count": 15 }, "sender-location": point("42.84,90.27"), "send-time": datetime("2011-02-09T10:10:00.000Z"), "referred-topics": {{ "at&t", "wireless" }}, "message-text": " hate at&t its wireless is terrible:(" }
+{ "tweetid": 25i64, "user": { "screen-name": "HollisJudge#731", "lang": "en", "friends_count": 58, "statuses_count": 211, "name": "Hollis Judge", "followers_count": 190 }, "sender-location": point("34.33,83.22"), "send-time": datetime("2006-11-21T10:10:00.000Z"), "referred-topics": {{ "samsung", "voice-clarity" }}, "message-text": " dislike samsung its voice-clarity is OMG:(" }
+{ "tweetid": 26i64, "user": { "screen-name": "DemarcusHarrow$822", "lang": "en", "friends_count": 60, "statuses_count": 171, "name": "Demarcus Harrow", "followers_count": 151 }, "sender-location": point("37.01,80.04"), "send-time": datetime("2012-07-19T10:10:00.000Z"), "referred-topics": {{ "at&t", "shortcut-menu" }}, "message-text": " like at&t its shortcut-menu is awesome" }
+{ "tweetid": 27i64, "user": { "screen-name": "OrsonBauerle$52", "lang": "en", "friends_count": 91, "statuses_count": 271, "name": "Orson Bauerle", "followers_count": 144 }, "sender-location": point("48.91,75.54"), "send-time": datetime("2010-02-18T10:10:00.000Z"), "referred-topics": {{ "samsung", "speed" }}, "message-text": " love samsung the speed is amazing:)" }
+{ "tweetid": 28i64, "user": { "screen-name": "ChadBeach#363", "lang": "en", "friends_count": 88, "statuses_count": 275, "name": "Chad Beach", "followers_count": 142 }, "sender-location": point("35.5,73.83"), "send-time": datetime("2007-07-28T10:10:00.000Z"), "referred-topics": {{ "motorola", "signal" }}, "message-text": " love motorola its signal is mind-blowing" }
+{ "tweetid": 29i64, "user": { "screen-name": "LupeNewbern#345", "lang": "en", "friends_count": 99, "statuses_count": 45, "name": "Lupe Newbern", "followers_count": 86 }, "sender-location": point("35.07,70.43"), "send-time": datetime("2010-12-23T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "voicemail-service" }}, "message-text": " love t-mobile its voicemail-service is awesome" }
+{ "tweetid": 30i64, "user": { "screen-name": "LoydJohnston@664", "lang": "en", "friends_count": 86, "statuses_count": 10, "name": "Loyd Johnston", "followers_count": 58 }, "sender-location": point("42.55,72.33"), "send-time": datetime("2010-02-01T10:10:00.000Z"), "referred-topics": {{ "iphone", "network" }}, "message-text": " like iphone the network is awesome" }
+{ "tweetid": 31i64, "user": { "screen-name": "VerityMunson#211", "lang": "en", "friends_count": 75, "statuses_count": 359, "name": "Verity Munson", "followers_count": 165 }, "sender-location": point("30.65,77.21"), "send-time": datetime("2009-01-06T10:10:00.000Z"), "referred-topics": {{ "verizon", "voice-command" }}, "message-text": " can't stand verizon the voice-command is bad:(" }
+{ "tweetid": 32i64, "user": { "screen-name": "RinaHerndon#616", "lang": "en", "friends_count": 19, "statuses_count": 265, "name": "Rina Herndon", "followers_count": 26 }, "sender-location": point("40.76,75.79"), "send-time": datetime("2009-09-19T10:10:00.000Z"), "referred-topics": {{ "verizon", "shortcut-menu" }}, "message-text": " love verizon the shortcut-menu is mind-blowing:)" }
+{ "tweetid": 33i64, "user": { "screen-name": "MadelaineSchreckengost@250", "lang": "en", "friends_count": 45, "statuses_count": 310, "name": "Madelaine Schreckengost", "followers_count": 153 }, "sender-location": point("30.35,66.43"), "send-time": datetime("2005-07-08T10:10:00.000Z"), "referred-topics": {{ "at&t", "plan" }}, "message-text": " can't stand at&t the plan is bad" }
+{ "tweetid": 34i64, "user": { "screen-name": "RadclyffeStaymates_289", "lang": "en", "friends_count": 50, "statuses_count": 188, "name": "Radclyffe Staymates", "followers_count": 97 }, "sender-location": point("45.42,77.18"), "send-time": datetime("2012-05-06T10:10:00.000Z"), "referred-topics": {{ "at&t", "customer-service" }}, "message-text": " hate at&t its customer-service is OMG" }
+{ "tweetid": 35i64, "user": { "screen-name": "VernieAlice$968", "lang": "en", "friends_count": 70, "statuses_count": 491, "name": "Vernie Alice", "followers_count": 193 }, "sender-location": point("28.03,79.37"), "send-time": datetime("2010-01-19T10:10:00.000Z"), "referred-topics": {{ "motorola", "voice-command" }}, "message-text": " can't stand motorola the voice-command is horrible" }
+{ "tweetid": 36i64, "user": { "screen-name": "GertieDugger#987", "lang": "en", "friends_count": 22, "statuses_count": 72, "name": "Gertie Dugger", "followers_count": 12 }, "sender-location": point("25.77,92.7"), "send-time": datetime("2009-09-25T10:10:00.000Z"), "referred-topics": {{ "sprint", "touch-screen" }}, "message-text": " like sprint its touch-screen is awesome" }
+{ "tweetid": 37i64, "user": { "screen-name": "AggieBollinger@675", "lang": "en", "friends_count": 45, "statuses_count": 175, "name": "Aggie Bollinger", "followers_count": 67 }, "sender-location": point("42.6,68.28"), "send-time": datetime("2012-02-22T10:10:00.000Z"), "referred-topics": {{ "sprint", "voice-clarity" }}, "message-text": " love sprint its voice-clarity is awesome" }
+{ "tweetid": 38i64, "user": { "screen-name": "JocelynPatton$328", "lang": "en", "friends_count": 35, "statuses_count": 484, "name": "Jocelyn Patton", "followers_count": 174 }, "sender-location": point("28.77,88.28"), "send-time": datetime("2006-12-09T10:10:00.000Z"), "referred-topics": {{ "at&t", "wireless" }}, "message-text": " hate at&t the wireless is horrible:(" }
+{ "tweetid": 39i64, "user": { "screen-name": "CandelariaHujsak#602", "lang": "en", "friends_count": 28, "statuses_count": 499, "name": "Candelaria Hujsak", "followers_count": 94 }, "sender-location": point("36.09,96.94"), "send-time": datetime("2007-11-23T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "wireless" }}, "message-text": " can't stand t-mobile the wireless is terrible:(" }
+{ "tweetid": 40i64, "user": { "screen-name": "DamarisMueller#283", "lang": "en", "friends_count": 46, "statuses_count": 122, "name": "Damaris Mueller", "followers_count": 189 }, "sender-location": point("44.31,73.93"), "send-time": datetime("2012-02-28T10:10:00.000Z"), "referred-topics": {{ "sprint", "wireless" }}, "message-text": " like sprint its wireless is awesome" }
+{ "tweetid": 41i64, "user": { "screen-name": "ChuckPhilbrick_884", "lang": "en", "friends_count": 73, "statuses_count": 237, "name": "Chuck Philbrick", "followers_count": 35 }, "sender-location": point("35.39,81.04"), "send-time": datetime("2012-05-07T10:10:00.000Z"), "referred-topics": {{ "verizon", "plan" }}, "message-text": " love verizon its plan is good:)" }
+{ "tweetid": 42i64, "user": { "screen-name": "BraxtonKifer_723", "lang": "en", "friends_count": 65, "statuses_count": 459, "name": "Braxton Kifer", "followers_count": 6 }, "sender-location": point("30.23,70.06"), "send-time": datetime("2007-10-15T10:10:00.000Z"), "referred-topics": {{ "verizon", "touch-screen" }}, "message-text": " dislike verizon the touch-screen is horrible" }
+{ "tweetid": 43i64, "user": { "screen-name": "DeshawnPorter#734", "lang": "en", "friends_count": 26, "statuses_count": 408, "name": "Deshawn Porter", "followers_count": 14 }, "sender-location": point("35.2,82.65"), "send-time": datetime("2005-10-06T10:10:00.000Z"), "referred-topics": {{ "sprint", "wireless" }}, "message-text": " love sprint its wireless is amazing" }
+{ "tweetid": 44i64, "user": { "screen-name": "SamanthaBeach$879", "lang": "en", "friends_count": 95, "statuses_count": 481, "name": "Samantha Beach", "followers_count": 119 }, "sender-location": point("30.28,89.79"), "send-time": datetime("2005-09-20T10:10:00.000Z"), "referred-topics": {{ "motorola", "network" }}, "message-text": " love motorola the network is mind-blowing:)" }
+{ "tweetid": 45i64, "user": { "screen-name": "NoelleBash_83", "lang": "en", "friends_count": 4, "statuses_count": 148, "name": "Noelle Bash", "followers_count": 139 }, "sender-location": point("42.4,96.94"), "send-time": datetime("2007-01-05T10:10:00.000Z"), "referred-topics": {{ "iphone", "platform" }}, "message-text": " hate iphone its platform is terrible:(" }
+{ "tweetid": 46i64, "user": { "screen-name": "RuthWells#712", "lang": "en", "friends_count": 51, "statuses_count": 415, "name": "Ruth Wells", "followers_count": 57 }, "sender-location": point("31.93,82.03"), "send-time": datetime("2007-04-21T10:10:00.000Z"), "referred-topics": {{ "iphone", "customization" }}, "message-text": " dislike iphone the customization is bad:(" }
+{ "tweetid": 47i64, "user": { "screen-name": "NakiaClose@771", "lang": "en", "friends_count": 59, "statuses_count": 239, "name": "Nakia Close", "followers_count": 105 }, "sender-location": point("47.06,92.54"), "send-time": datetime("2005-02-18T10:10:00.000Z"), "referred-topics": {{ "motorola", "3G" }}, "message-text": " can't stand motorola its 3G is OMG:(" }
+{ "tweetid": 48i64, "user": { "screen-name": "EmLinton#420", "lang": "en", "friends_count": 87, "statuses_count": 481, "name": "Em Linton", "followers_count": 141 }, "sender-location": point("35.6,88.2"), "send-time": datetime("2006-09-24T10:10:00.000Z"), "referred-topics": {{ "iphone", "customer-service" }}, "message-text": " hate iphone its customer-service is horrible" }
+{ "tweetid": 49i64, "user": { "screen-name": "DarbyPatton_703", "lang": "en", "friends_count": 40, "statuses_count": 79, "name": "Darby Patton", "followers_count": 159 }, "sender-location": point("36.57,84.01"), "send-time": datetime("2006-06-14T10:10:00.000Z"), "referred-topics": {{ "verizon", "platform" }}, "message-text": " love verizon its platform is good" }
+{ "tweetid": 50i64, "user": { "screen-name": "WilburStephenson$295", "lang": "en", "friends_count": 57, "statuses_count": 337, "name": "Wilbur Stephenson", "followers_count": 188 }, "sender-location": point("38.35,83.92"), "send-time": datetime("2006-10-14T10:10:00.000Z"), "referred-topics": {{ "motorola", "plan" }}, "message-text": " dislike motorola the plan is OMG:(" }
+{ "tweetid": 51i64, "user": { "screen-name": "PalmerHahn@368", "lang": "en", "friends_count": 13, "statuses_count": 196, "name": "Palmer Hahn", "followers_count": 69 }, "sender-location": point("48.96,88.74"), "send-time": datetime("2006-01-07T10:10:00.000Z"), "referred-topics": {{ "samsung", "shortcut-menu" }}, "message-text": " like samsung its shortcut-menu is awesome" }
+{ "tweetid": 52i64, "user": { "screen-name": "HarlanWynne_297", "lang": "en", "friends_count": 71, "statuses_count": 262, "name": "Harlan Wynne", "followers_count": 151 }, "sender-location": point("41.05,93.92"), "send-time": datetime("2008-07-08T10:10:00.000Z"), "referred-topics": {{ "samsung", "platform" }}, "message-text": " like samsung its platform is awesome" }
+{ "tweetid": 53i64, "user": { "screen-name": "GrettaCable#405", "lang": "en", "friends_count": 7, "statuses_count": 324, "name": "Gretta Cable", "followers_count": 82 }, "sender-location": point("40.6,71.86"), "send-time": datetime("2010-11-16T10:10:00.000Z"), "referred-topics": {{ "iphone", "network" }}, "message-text": " like iphone its network is amazing:)" }
+{ "tweetid": 54i64, "user": { "screen-name": "PhilipaRing_461", "lang": "en", "friends_count": 43, "statuses_count": 53, "name": "Philipa Ring", "followers_count": 164 }, "sender-location": point("30.47,90.14"), "send-time": datetime("2011-12-24T10:10:00.000Z"), "referred-topics": {{ "motorola", "voicemail-service" }}, "message-text": " like motorola its voicemail-service is amazing" }
+{ "tweetid": 55i64, "user": { "screen-name": "LindseyBurch_187", "lang": "en", "friends_count": 9, "statuses_count": 54, "name": "Lindsey Burch", "followers_count": 6 }, "sender-location": point("31.66,68.68"), "send-time": datetime("2011-12-21T10:10:00.000Z"), "referred-topics": {{ "samsung", "touch-screen" }}, "message-text": " can't stand samsung its touch-screen is terrible" }
+{ "tweetid": 56i64, "user": { "screen-name": "AnnabelLosey_61", "lang": "en", "friends_count": 53, "statuses_count": 381, "name": "Annabel Losey", "followers_count": 133 }, "sender-location": point("37.33,85.16"), "send-time": datetime("2005-11-14T10:10:00.000Z"), "referred-topics": {{ "sprint", "customization" }}, "message-text": " can't stand sprint the customization is horrible:(" }
+{ "tweetid": 57i64, "user": { "screen-name": "HectorLalty@132", "lang": "en", "friends_count": 2, "statuses_count": 195, "name": "Hector Lalty", "followers_count": 92 }, "sender-location": point("46.52,80.45"), "send-time": datetime("2012-04-15T10:10:00.000Z"), "referred-topics": {{ "iphone", "reachability" }}, "message-text": " hate iphone the reachability is bad:(" }
+{ "tweetid": 58i64, "user": { "screen-name": "KatieWilkins_817", "lang": "en", "friends_count": 95, "statuses_count": 476, "name": "Katie Wilkins", "followers_count": 151 }, "sender-location": point("44.72,69.13"), "send-time": datetime("2006-11-01T10:10:00.000Z"), "referred-topics": {{ "sprint", "voice-command" }}, "message-text": " like sprint the voice-command is amazing:)" }
+{ "tweetid": 59i64, "user": { "screen-name": "BrianneRamsey$451", "lang": "en", "friends_count": 13, "statuses_count": 69, "name": "Brianne Ramsey", "followers_count": 102 }, "sender-location": point("37.02,80.95"), "send-time": datetime("2007-02-08T10:10:00.000Z"), "referred-topics": {{ "verizon", "network" }}, "message-text": " dislike verizon the network is terrible" }
+{ "tweetid": 60i64, "user": { "screen-name": "RinaHujsak#7", "lang": "en", "friends_count": 69, "statuses_count": 73, "name": "Rina Hujsak", "followers_count": 63 }, "sender-location": point("28.27,73.68"), "send-time": datetime("2009-03-28T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "network" }}, "message-text": " like t-mobile its network is amazing:)" }
+{ "tweetid": 61i64, "user": { "screen-name": "GertieSadley$508", "lang": "en", "friends_count": 35, "statuses_count": 235, "name": "Gertie Sadley", "followers_count": 87 }, "sender-location": point("40.19,86.0"), "send-time": datetime("2006-07-27T10:10:00.000Z"), "referred-topics": {{ "at&t", "reachability" }}, "message-text": " love at&t its reachability is mind-blowing:)" }
+{ "tweetid": 62i64, "user": { "screen-name": "AaronJackson_273", "lang": "en", "friends_count": 98, "statuses_count": 205, "name": "Aaron Jackson", "followers_count": 128 }, "sender-location": point("48.11,85.01"), "send-time": datetime("2011-05-14T10:10:00.000Z"), "referred-topics": {{ "iphone", "voice-command" }}, "message-text": " like iphone the voice-command is awesome:)" }
+{ "tweetid": 63i64, "user": { "screen-name": "CreightonHujsak$142", "lang": "en", "friends_count": 21, "statuses_count": 68, "name": "Creighton Hujsak", "followers_count": 70 }, "sender-location": point("40.55,90.98"), "send-time": datetime("2010-08-15T10:10:00.000Z"), "referred-topics": {{ "samsung", "voicemail-service" }}, "message-text": " love samsung the voicemail-service is amazing" }
+{ "tweetid": 64i64, "user": { "screen-name": "KazukoWilkinson$204", "lang": "en", "friends_count": 51, "statuses_count": 147, "name": "Kazuko Wilkinson", "followers_count": 86 }, "sender-location": point("29.64,94.45"), "send-time": datetime("2008-08-24T10:10:00.000Z"), "referred-topics": {{ "motorola", "speed" }}, "message-text": " love motorola the speed is mind-blowing:)" }
+{ "tweetid": 65i64, "user": { "screen-name": "GonzaloDiegel#186", "lang": "en", "friends_count": 80, "statuses_count": 149, "name": "Gonzalo Diegel", "followers_count": 89 }, "sender-location": point("48.68,83.09"), "send-time": datetime("2008-04-24T10:10:00.000Z"), "referred-topics": {{ "at&t", "voicemail-service" }}, "message-text": " dislike at&t its voicemail-service is horrible:(" }
+{ "tweetid": 66i64, "user": { "screen-name": "KizzyKanaga$317", "lang": "en", "friends_count": 52, "statuses_count": 330, "name": "Kizzy Kanaga", "followers_count": 6 }, "sender-location": point("27.96,90.03"), "send-time": datetime("2009-10-19T10:10:00.000Z"), "referred-topics": {{ "at&t", "touch-screen" }}, "message-text": " like at&t the touch-screen is amazing" }
+{ "tweetid": 67i64, "user": { "screen-name": "CraigTreeby@171", "lang": "en", "friends_count": 72, "statuses_count": 44, "name": "Craig Treeby", "followers_count": 155 }, "sender-location": point("48.99,91.21"), "send-time": datetime("2006-02-14T10:10:00.000Z"), "referred-topics": {{ "samsung", "signal" }}, "message-text": " love samsung the signal is amazing:)" }
+{ "tweetid": 68i64, "user": { "screen-name": "BrionySaltser#395", "lang": "en", "friends_count": 21, "statuses_count": 422, "name": "Briony Saltser", "followers_count": 129 }, "sender-location": point("37.33,67.08"), "send-time": datetime("2006-03-07T10:10:00.000Z"), "referred-topics": {{ "samsung", "shortcut-menu" }}, "message-text": " love samsung its shortcut-menu is amazing:)" }
+{ "tweetid": 69i64, "user": { "screen-name": "MagdaleneWerner$925", "lang": "en", "friends_count": 46, "statuses_count": 446, "name": "Magdalene Werner", "followers_count": 75 }, "sender-location": point("45.77,83.23"), "send-time": datetime("2005-06-09T10:10:00.000Z"), "referred-topics": {{ "iphone", "signal" }}, "message-text": " like iphone the signal is mind-blowing" }
+{ "tweetid": 70i64, "user": { "screen-name": "FlossieBaker$898", "lang": "en", "friends_count": 67, "statuses_count": 63, "name": "Flossie Baker", "followers_count": 50 }, "sender-location": point("44.37,89.4"), "send-time": datetime("2011-07-16T10:10:00.000Z"), "referred-topics": {{ "motorola", "network" }}, "message-text": " like motorola its network is good" }
+{ "tweetid": 71i64, "user": { "screen-name": "GradyGraff$247", "lang": "en", "friends_count": 21, "statuses_count": 58, "name": "Grady Graff", "followers_count": 45 }, "sender-location": point("24.81,67.13"), "send-time": datetime("2012-04-09T10:10:00.000Z"), "referred-topics": {{ "motorola", "reachability" }}, "message-text": " like motorola the reachability is good" }
+{ "tweetid": 72i64, "user": { "screen-name": "MelitaLombardi@324", "lang": "en", "friends_count": 39, "statuses_count": 32, "name": "Melita Lombardi", "followers_count": 167 }, "sender-location": point("24.23,73.03"), "send-time": datetime("2011-02-26T10:10:00.000Z"), "referred-topics": {{ "verizon", "network" }}, "message-text": " hate verizon the network is terrible:(" }
+{ "tweetid": 73i64, "user": { "screen-name": "HerbertPowell_651", "lang": "en", "friends_count": 17, "statuses_count": 57, "name": "Herbert Powell", "followers_count": 167 }, "sender-location": point("47.22,92.69"), "send-time": datetime("2005-01-25T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "customization" }}, "message-text": " love t-mobile its customization is awesome:)" }
+{ "tweetid": 74i64, "user": { "screen-name": "BasilSanborn$23", "lang": "en", "friends_count": 38, "statuses_count": 391, "name": "Basil Sanborn", "followers_count": 108 }, "sender-location": point("30.96,68.0"), "send-time": datetime("2008-12-25T10:10:00.000Z"), "referred-topics": {{ "samsung", "network" }}, "message-text": " can't stand samsung the network is bad" }
+{ "tweetid": 75i64, "user": { "screen-name": "LaurineZoucks$307", "lang": "en", "friends_count": 27, "statuses_count": 161, "name": "Laurine Zoucks", "followers_count": 144 }, "sender-location": point("40.78,91.08"), "send-time": datetime("2009-11-02T10:10:00.000Z"), "referred-topics": {{ "motorola", "customer-service" }}, "message-text": " like motorola the customer-service is amazing" }
+{ "tweetid": 76i64, "user": { "screen-name": "LincolnMarriman@675", "lang": "en", "friends_count": 3, "statuses_count": 389, "name": "Lincoln Marriman", "followers_count": 125 }, "sender-location": point("28.4,83.82"), "send-time": datetime("2006-11-04T10:10:00.000Z"), "referred-topics": {{ "verizon", "customer-service" }}, "message-text": " like verizon the customer-service is mind-blowing" }
+{ "tweetid": 77i64, "user": { "screen-name": "FrancesFinlay#683", "lang": "en", "friends_count": 71, "statuses_count": 174, "name": "Frances Finlay", "followers_count": 32 }, "sender-location": point("29.71,66.36"), "send-time": datetime("2012-04-18T10:10:00.000Z"), "referred-topics": {{ "iphone", "customization" }}, "message-text": " love iphone the customization is awesome" }
+{ "tweetid": 78i64, "user": { "screen-name": "ModestoMarriman_627", "lang": "en", "friends_count": 76, "statuses_count": 2, "name": "Modesto Marriman", "followers_count": 33 }, "sender-location": point("33.77,92.15"), "send-time": datetime("2011-09-26T10:10:00.000Z"), "referred-topics": {{ "samsung", "network" }}, "message-text": " love samsung its network is mind-blowing" }
+{ "tweetid": 79i64, "user": { "screen-name": "FlossieCamp#59", "lang": "en", "friends_count": 17, "statuses_count": 484, "name": "Flossie Camp", "followers_count": 142 }, "sender-location": point("24.67,77.24"), "send-time": datetime("2005-07-03T10:10:00.000Z"), "referred-topics": {{ "iphone", "reachability" }}, "message-text": " like iphone its reachability is awesome:)" }
+{ "tweetid": 80i64, "user": { "screen-name": "DouglasKing@553", "lang": "en", "friends_count": 62, "statuses_count": 251, "name": "Douglas King", "followers_count": 180 }, "sender-location": point("24.84,74.15"), "send-time": datetime("2009-10-09T10:10:00.000Z"), "referred-topics": {{ "sprint", "speed" }}, "message-text": " can't stand sprint the speed is bad:(" }
+{ "tweetid": 81i64, "user": { "screen-name": "WardCasteel@972", "lang": "en", "friends_count": 8, "statuses_count": 358, "name": "Ward Casteel", "followers_count": 51 }, "sender-location": point("41.41,91.32"), "send-time": datetime("2007-05-08T10:10:00.000Z"), "referred-topics": {{ "at&t", "voice-command" }}, "message-text": " can't stand at&t the voice-command is terrible:(" }
+{ "tweetid": 82i64, "user": { "screen-name": "AdelaErskine#579", "lang": "en", "friends_count": 97, "statuses_count": 354, "name": "Adela Erskine", "followers_count": 155 }, "sender-location": point("35.56,68.19"), "send-time": datetime("2009-03-23T10:10:00.000Z"), "referred-topics": {{ "samsung", "touch-screen" }}, "message-text": " hate samsung the touch-screen is bad:(" }
+{ "tweetid": 83i64, "user": { "screen-name": "ClevelandPrevatt#255", "lang": "en", "friends_count": 24, "statuses_count": 159, "name": "Cleveland Prevatt", "followers_count": 68 }, "sender-location": point("38.6,67.51"), "send-time": datetime("2006-10-09T10:10:00.000Z"), "referred-topics": {{ "sprint", "platform" }}, "message-text": " hate sprint its platform is OMG:(" }
+{ "tweetid": 84i64, "user": { "screen-name": "MaxwellTreeby@610", "lang": "en", "friends_count": 21, "statuses_count": 168, "name": "Maxwell Treeby", "followers_count": 138 }, "sender-location": point("38.37,79.64"), "send-time": datetime("2007-07-17T10:10:00.000Z"), "referred-topics": {{ "motorola", "speed" }}, "message-text": " like motorola its speed is mind-blowing" }
+{ "tweetid": 85i64, "user": { "screen-name": "BobbyBastion$235", "lang": "en", "friends_count": 48, "statuses_count": 251, "name": "Bobby Bastion", "followers_count": 123 }, "sender-location": point("45.84,83.03"), "send-time": datetime("2009-03-14T10:10:00.000Z"), "referred-topics": {{ "samsung", "voice-command" }}, "message-text": " love samsung its voice-command is amazing" }
+{ "tweetid": 86i64, "user": { "screen-name": "ClairKanaga$512", "lang": "en", "friends_count": 88, "statuses_count": 274, "name": "Clair Kanaga", "followers_count": 77 }, "sender-location": point("46.34,84.86"), "send-time": datetime("2006-07-15T10:10:00.000Z"), "referred-topics": {{ "samsung", "reachability" }}, "message-text": " love samsung its reachability is mind-blowing:)" }
+{ "tweetid": 87i64, "user": { "screen-name": "HueyLosey_966", "lang": "en", "friends_count": 78, "statuses_count": 32, "name": "Huey Losey", "followers_count": 2 }, "sender-location": point("25.61,78.89"), "send-time": datetime("2011-03-22T10:10:00.000Z"), "referred-topics": {{ "samsung", "reachability" }}, "message-text": " like samsung its reachability is good:)" }
+{ "tweetid": 88i64, "user": { "screen-name": "SooThigpen#463", "lang": "en", "friends_count": 5, "statuses_count": 429, "name": "Soo Thigpen", "followers_count": 18 }, "sender-location": point("34.84,74.43"), "send-time": datetime("2009-03-09T10:10:00.000Z"), "referred-topics": {{ "motorola", "wireless" }}, "message-text": " love motorola the wireless is good:)" }
+{ "tweetid": 89i64, "user": { "screen-name": "LacreshaWire_320", "lang": "en", "friends_count": 92, "statuses_count": 127, "name": "Lacresha Wire", "followers_count": 194 }, "sender-location": point("47.73,86.79"), "send-time": datetime("2007-08-04T10:10:00.000Z"), "referred-topics": {{ "verizon", "wireless" }}, "message-text": " can't stand verizon its wireless is OMG:(" }
+{ "tweetid": 90i64, "user": { "screen-name": "MyriamLambert@966", "lang": "en", "friends_count": 22, "statuses_count": 452, "name": "Myriam Lambert", "followers_count": 193 }, "sender-location": point("41.85,88.44"), "send-time": datetime("2008-12-02T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "plan" }}, "message-text": " hate t-mobile the plan is bad" }
+{ "tweetid": 91i64, "user": { "screen-name": "WoodyWhite@341", "lang": "en", "friends_count": 12, "statuses_count": 183, "name": "Woody White", "followers_count": 31 }, "sender-location": point("29.04,85.35"), "send-time": datetime("2006-02-06T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "network" }}, "message-text": " like t-mobile its network is good" }
+{ "tweetid": 92i64, "user": { "screen-name": "QuinDickinson#157", "lang": "en", "friends_count": 84, "statuses_count": 415, "name": "Quin Dickinson", "followers_count": 9 }, "sender-location": point("40.86,67.52"), "send-time": datetime("2006-01-26T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "signal" }}, "message-text": " can't stand t-mobile the signal is horrible:(" }
+{ "tweetid": 93i64, "user": { "screen-name": "BettieRing@713", "lang": "en", "friends_count": 39, "statuses_count": 373, "name": "Bettie Ring", "followers_count": 98 }, "sender-location": point("26.37,69.03"), "send-time": datetime("2005-10-04T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "reachability" }}, "message-text": " dislike t-mobile the reachability is terrible:(" }
+{ "tweetid": 94i64, "user": { "screen-name": "LinaDraudy_733", "lang": "en", "friends_count": 70, "statuses_count": 228, "name": "Lina Draudy", "followers_count": 9 }, "sender-location": point("39.58,97.38"), "send-time": datetime("2012-03-13T10:10:00.000Z"), "referred-topics": {{ "verizon", "network" }}, "message-text": " like verizon the network is awesome:)" }
+{ "tweetid": 95i64, "user": { "screen-name": "StacyFleming#907", "lang": "en", "friends_count": 37, "statuses_count": 119, "name": "Stacy Fleming", "followers_count": 113 }, "sender-location": point("24.27,94.53"), "send-time": datetime("2007-10-08T10:10:00.000Z"), "referred-topics": {{ "samsung", "platform" }}, "message-text": " love samsung its platform is amazing:)" }
+{ "tweetid": 96i64, "user": { "screen-name": "AmbroseAllshouse_786", "lang": "en", "friends_count": 24, "statuses_count": 299, "name": "Ambrose Allshouse", "followers_count": 23 }, "sender-location": point("34.88,73.05"), "send-time": datetime("2009-01-09T10:10:00.000Z"), "referred-topics": {{ "verizon", "speed" }}, "message-text": " hate verizon the speed is horrible:(" }
+{ "tweetid": 97i64, "user": { "screen-name": "VaughnFocell_20", "lang": "en", "friends_count": 68, "statuses_count": 388, "name": "Vaughn Focell", "followers_count": 171 }, "sender-location": point("34.67,73.46"), "send-time": datetime("2012-01-24T10:10:00.000Z"), "referred-topics": {{ "t-mobile", "customer-service" }}, "message-text": " can't stand t-mobile its customer-service is terrible" }
+{ "tweetid": 98i64, "user": { "screen-name": "UlyssesCrissman#115", "lang": "en", "friends_count": 90, "statuses_count": 250, "name": "Ulysses Crissman", "followers_count": 110 }, "sender-location": point("24.81,93.59"), "send-time": datetime("2008-04-02T10:10:00.000Z"), "referred-topics": {{ "motorola", "customer-service" }}, "message-text": " love motorola its customer-service is awesome" }
+{ "tweetid": 99i64, "user": { "screen-name": "WatCrissman#703", "lang": "en", "friends_count": 50, "statuses_count": 244, "name": "Wat Crissman", "followers_count": 123 }, "sender-location": point("33.22,92.64"), "send-time": datetime("2006-09-15T10:10:00.000Z"), "referred-topics": {{ "motorola", "plan" }}, "message-text": " can't stand motorola the plan is terrible" }
+{ "tweetid": 100i64, "user": { "screen-name": "BambiLaurence$910", "lang": "en", "friends_count": 57, "statuses_count": 311, "name": "Bambi Laurence", "followers_count": 136 }, "sender-location": point("36.88,80.08"), "send-time": datetime("2008-04-26T10:10:00.000Z"), "referred-topics": {{ "sprint", "speed" }}, "message-text": " love sprint its speed is mind-blowing" }
diff --git a/asterix-app/src/test/resources/runtimets/testsuite.xml b/asterix-app/src/test/resources/runtimets/testsuite.xml
index bb0a7ab..aaaefc1 100644
--- a/asterix-app/src/test/resources/runtimets/testsuite.xml
+++ b/asterix-app/src/test/resources/runtimets/testsuite.xml
@@ -4425,6 +4425,11 @@
       </compilation-unit>
     </test-case>
     <test-case FilePath="feeds">
+      <compilation-unit name="feeds_06">
+        <output-dir compare="Text">feeds_06</output-dir>
+      </compilation-unit>
+    </test-case>
+    <test-case FilePath="feeds">
       <compilation-unit name="issue_230_feeds">
         <output-dir compare="Text">issue_230_feeds</output-dir>
       </compilation-unit>
diff --git a/asterix-common/.gitignore b/asterix-common/.gitignore
index 19f2e00..073c9fa 100644
--- a/asterix-common/.gitignore
+++ b/asterix-common/.gitignore
@@ -1,2 +1,3 @@
 /target
 /target
+/target
diff --git a/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixExternalProperties.java b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixExternalProperties.java
index 4b930a9..f5a902a 100644
--- a/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixExternalProperties.java
+++ b/asterix-common/src/main/java/edu/uci/ics/asterix/common/config/AsterixExternalProperties.java
@@ -35,7 +35,7 @@
 
     private static final String EXTERNAL_NC_JAVA_OPTS_KEY = "nc.java.opts";
     private static String EXTERNAL_NC_JAVA_OPTS_DEFAULT = "-Xmx1024m";
-    
+
     private static final String EXTERNAL_MAX_WAIT_FOR_ACTIVE_CLUSTER = "max.wait.active.cluster";
     private static int EXTERNAL_MAX_WAIT_FOR_ACTIVE_CLUSTER_DEFAULT = 60;
 
@@ -72,7 +72,7 @@
         return accessor.getProperty(EXTERNAL_CC_JAVA_OPTS_KEY, EXTERNAL_CC_JAVA_OPTS_DEFAULT,
                 PropertyInterpreters.getStringPropertyInterpreter());
     }
-    
+
     public int getMaxWaitClusterActive() {
         return accessor.getProperty(EXTERNAL_MAX_WAIT_FOR_ACTIVE_CLUSTER, EXTERNAL_MAX_WAIT_FOR_ACTIVE_CLUSTER_DEFAULT,
                 PropertyInterpreters.getIntegerPropertyInterpreter());
diff --git a/asterix-common/src/test/java/edu/uci/ics/asterix/test/aql/TestsUtils.java b/asterix-common/src/test/java/edu/uci/ics/asterix/test/aql/TestsUtils.java
index a22ac7a..30452e5 100644
--- a/asterix-common/src/test/java/edu/uci/ics/asterix/test/aql/TestsUtils.java
+++ b/asterix-common/src/test/java/edu/uci/ics/asterix/test/aql/TestsUtils.java
@@ -254,7 +254,8 @@
             GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, "Method failed: " + method.getStatusLine());
             String[] errors = handleError(method);
             GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, errors[2]);
-            throw new Exception("DDL operation failed: " + errors[0] + "\nSUMMARY: " + errors[1] + "\nSTACKTRACE: " + errors[2]);
+            throw new Exception("DDL operation failed: " + errors[0] + "\nSUMMARY: " + errors[1] + "\nSTACKTRACE: "
+                    + errors[2]);
         }
     }
 
@@ -284,7 +285,8 @@
             GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, "Method failed: " + method.getStatusLine());
             String[] errors = handleError(method);
             GlobalConfig.ASTERIX_LOGGER.log(Level.SEVERE, errors[2]);
-            throw new Exception("DDL operation failed: " + errors[0] + "\nSUMMARY: " + errors[1] + "\nSTACKTRACE: " + errors[2]);
+            throw new Exception("DDL operation failed: " + errors[0] + "\nSUMMARY: " + errors[1] + "\nSTACKTRACE: "
+                    + errors[2]);
         }
     }
 
@@ -421,6 +423,7 @@
                             break;
                         case "txnqar": //qar represents query after recovery
                             try {
+
                                 InputStream resultStream = executeQuery(statement);
 
                                 qarFile = new File(actualPath + File.separator
@@ -428,8 +431,10 @@
                                         + cUnit.getName() + "_qar.adm");
                                 qarFile.getParentFile().mkdirs();
                                 TestsUtils.writeResultsToFile(qarFile, resultStream);
+
                                 TestsUtils.runScriptAndCompareWithResult(testFile, new PrintWriter(System.err),
                                         qbcFile, qarFile);
+
                                 LOGGER.info("[TEST]: " + testCaseCtx.getTestCase().getFilePath() + "/"
                                         + cUnit.getName() + " PASSED ");
                             } catch (JsonMappingException e) {
@@ -459,6 +464,14 @@
                         case "sleep":
                             Thread.sleep(Long.parseLong(statement.trim()));
                             break;
+                        case "errddl": // a ddlquery that expects error
+                            try {
+                                TestsUtils.executeDDL(statement);
+
+                            } catch (Exception e) {
+                                // expected error happens
+                            }
+                            break;
                         default:
                             throw new IllegalArgumentException("No statements of type " + ctx.getType());
                     }
@@ -472,4 +485,5 @@
             }
         }
     }
+
 }
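
The new "errddl" statement type handled above runs a DDL and treats the resulting exception as the expected outcome, so a negative DDL test no longer aborts the whole compilation unit. A hypothetical test step using it might look like the sketch below (the file name, dataverse, and type name are assumptions for illustration; by analogy with the other statement types it would live in a file such as <test>.<n>.errddl.aql):

    use dataverse feeds;

    /* expected to fail: the referenced datatype does not exist */
    create dataset BadTweets(NoSuchType)
    primary key tweetid;
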
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/util/AsterixConstants.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/util/AsterixConstants.java
index ff600ce..c29a96e 100644
--- a/asterix-events/src/main/java/edu/uci/ics/asterix/event/util/AsterixConstants.java
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/util/AsterixConstants.java
@@ -1,3 +1,17 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package edu.uci.ics.asterix.event.util;
 
 public class AsterixConstants {
diff --git a/asterix-events/src/main/java/edu/uci/ics/asterix/event/util/PatternCreator.java b/asterix-events/src/main/java/edu/uci/ics/asterix/event/util/PatternCreator.java
index e06d66c..9af9307 100644
--- a/asterix-events/src/main/java/edu/uci/ics/asterix/event/util/PatternCreator.java
+++ b/asterix-events/src/main/java/edu/uci/ics/asterix/event/util/PatternCreator.java
@@ -499,6 +499,9 @@
 
         for (Node node : cluster.getNode()) {
             nodeid = new Nodeid(new Value(null, node.getId()));
+            if (node.getLogDir() != null) {
+                pargs = node.getLogDir();
+            }
             event = new Event("file_delete", nodeid, pargs);
             patternList.add(new Pattern(null, 1, null, event));
         }
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/CNNFeedAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/CNNFeedAdapterFactory.java
index bf89ccb..fa66715 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/CNNFeedAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/CNNFeedAdapterFactory.java
@@ -19,7 +19,7 @@
 import java.util.List;
 import java.util.Map;
 
-import edu.uci.ics.asterix.external.dataset.adapter.CNNFeedAdapter;
+import edu.uci.ics.asterix.external.dataset.adapter.RSSFeedAdapter;
 import edu.uci.ics.asterix.metadata.feeds.IDatasourceAdapter;
 import edu.uci.ics.asterix.metadata.feeds.ITypedAdapterFactory;
 import edu.uci.ics.asterix.om.types.ARecordType;
@@ -81,7 +81,7 @@
 
     @Override
     public IDatasourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws Exception {
-        CNNFeedAdapter cnnFeedAdapter = new CNNFeedAdapter(configuration, recordType, ctx);
+        RSSFeedAdapter cnnFeedAdapter = new RSSFeedAdapter(configuration, recordType, ctx);
         return cnnFeedAdapter;
     }
 
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HiveAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HiveAdapterFactory.java
index 054fee3..669dc61 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HiveAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/HiveAdapterFactory.java
@@ -14,49 +14,23 @@
  */
 package edu.uci.ics.asterix.external.adapter.factory;
 
-import java.util.Arrays;
-import java.util.HashMap;
 import java.util.Map;
 
-import org.apache.hadoop.mapred.InputSplit;
-import org.apache.hadoop.mapred.JobConf;
-
 import edu.uci.ics.asterix.external.dataset.adapter.HDFSAdapter;
 import edu.uci.ics.asterix.external.dataset.adapter.HiveAdapter;
 import edu.uci.ics.asterix.metadata.feeds.IDatasourceAdapter;
 import edu.uci.ics.asterix.metadata.feeds.IGenericAdapterFactory;
 import edu.uci.ics.asterix.om.types.ARecordType;
 import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.asterix.om.util.AsterixClusterProperties;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.hdfs.dataflow.ConfFactory;
-import edu.uci.ics.hyracks.hdfs.dataflow.InputSplitsFactory;
-import edu.uci.ics.hyracks.hdfs.scheduler.Scheduler;
 
 /**
  * A factory class for creating an instance of HiveAdapter
  */
-@SuppressWarnings("deprecation")
 public class HiveAdapterFactory extends StreamBasedAdapterFactory implements IGenericAdapterFactory {
     private static final long serialVersionUID = 1L;
 
-    public static final String HDFS_ADAPTER_NAME = "hdfs";
-    public static final String CLUSTER_LOCATIONS = "cluster-locations";
-    public static transient String SCHEDULER = "hdfs-scheduler";
-
-    public static final String KEY_HDFS_URL = "hdfs";
-    public static final String KEY_PATH = "path";
-    public static final String KEY_INPUT_FORMAT = "input-format";
-    public static final String INPUT_FORMAT_TEXT = "text-input-format";
-    public static final String INPUT_FORMAT_SEQUENCE = "sequence-input-format";
-
-    public static final String KEY_FORMAT = "format";
-    public static final String KEY_PARSER_FACTORY = "parser";
-    public static final String FORMAT_DELIMITED_TEXT = "delimited-text";
-    public static final String FORMAT_ADM = "adm";
-
     public static final String HIVE_DATABASE = "database";
     public static final String HIVE_TABLE = "table";
     public static final String HIVE_HOME = "hive-home";
@@ -64,30 +38,19 @@
     public static final String HIVE_WAREHOUSE_DIR = "warehouse-dir";
     public static final String HIVE_METASTORE_RAWSTORE_IMPL = "rawstore-impl";
 
-    private String[] readSchedule;
-    private boolean executed[];
-    private InputSplitsFactory inputSplitsFactory;
-    private ConfFactory confFactory;
-    private transient AlgebricksPartitionConstraint clusterLocations;
+    private HDFSAdapterFactory hdfsAdapterFactory;
+    private HDFSAdapter hdfsAdapter;
     private boolean configured = false;
     private IAType atype;
 
-    private static final Map<String, String> formatClassNames = initInputFormatMap();
-
-    private static Map<String, String> initInputFormatMap() {
-        Map<String, String> formatClassNames = new HashMap<String, String>();
-        formatClassNames.put(INPUT_FORMAT_TEXT, "org.apache.hadoop.mapred.TextInputFormat");
-        formatClassNames.put(INPUT_FORMAT_SEQUENCE, "org.apache.hadoop.mapred.SequenceFileInputFormat");
-        return formatClassNames;
+    public HiveAdapterFactory() {
+        hdfsAdapterFactory = new HDFSAdapterFactory();
     }
 
     @Override
     public IDatasourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws Exception {
-        JobConf conf = confFactory.getConf();
-        InputSplit[] inputSplits = inputSplitsFactory.getSplits();
-        String nodeName = ctx.getJobletContext().getApplicationContext().getNodeId();
-        HiveAdapter hiveAdapter = new HiveAdapter(atype, readSchedule, executed, inputSplits, conf, clusterLocations,
-                nodeName, parserFactory, ctx);
+        hdfsAdapter = (HDFSAdapter) hdfsAdapterFactory.createAdapter(ctx, partition);
+        HiveAdapter hiveAdapter = new HiveAdapter(atype, hdfsAdapter, parserFactory, ctx);
         return hiveAdapter;
     }
 
@@ -109,32 +72,12 @@
     @Override
     public void configure(Map<String, String> configuration, ARecordType outputType) throws Exception {
         if (!configured) {
-            /** set up the factory --serializable stuff --- this if-block should be called only once for each factory instance */
-            configureJobConf(configuration);
-            JobConf conf = configureJobConf(configuration);
-            confFactory = new ConfFactory(conf);
-
-            clusterLocations = AsterixClusterProperties.INSTANCE.getClusterLocations();
-            int numPartitions = ((AlgebricksAbsolutePartitionConstraint) clusterLocations).getLocations().length;
-
-            InputSplit[] inputSplits = conf.getInputFormat().getSplits(conf, numPartitions);
-            inputSplitsFactory = new InputSplitsFactory(inputSplits);
-
-            Scheduler scheduler = HDFSAdapterFactory.hdfsScheduler;
-            readSchedule = scheduler.getLocationConstraints(inputSplits);
-            executed = new boolean[readSchedule.length];
-            Arrays.fill(executed, false);
-
-            atype = (IAType) outputType;
-            configureFormat(atype);
-            configured = true;
+            populateConfiguration(configuration);
+            hdfsAdapterFactory.configure(configuration, outputType);
+            atype = (IAType) outputType;
+            configured = true;
         }
-
     }
 
-    private JobConf configureJobConf(Map<String, String> configuration) throws Exception {
-        JobConf conf = new JobConf();
-
+    private void populateConfiguration(Map<String, String> configuration) throws Exception {
         /** configure hive */
         String database = (String) configuration.get(HIVE_DATABASE);
         String tablePath = null;
@@ -154,21 +97,11 @@
             throw new IllegalArgumentException("file input format"
                     + configuration.get(HDFSAdapterFactory.KEY_INPUT_FORMAT) + " is not supported");
         }
-
-        /** configure hdfs */
-        conf.set("fs.default.name", ((String) configuration.get(KEY_HDFS_URL)).trim());
-        conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
-        conf.setClassLoader(HDFSAdapter.class.getClassLoader());
-        conf.set("mapred.input.dir", ((String) configuration.get(KEY_PATH)).trim());
-        conf.set("mapred.input.format.class",
-                (String) formatClassNames.get(((String) configuration.get(KEY_INPUT_FORMAT)).trim()));
-        return conf;
     }
 
     @Override
     public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
-        // TODO Auto-generated method stub
-        return null;
+        return hdfsAdapterFactory.getPartitionConstraint();
     }
 
 }
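HiveAdapterFactory now composes an HDFSAdapterFactory rather than duplicating the Hadoop job setup, and getPartitionConstraint() is delegated to the wrapped factory as well. A minimal usage sketch, assuming a Hyracks task context, a partition number, and an output record type are already in hand (the configuration values below are illustrative, the helper name is hypothetical, imports are the ones already visible in this hunk, and the Hive-to-HDFS path translation itself lives in the part of populateConfiguration not shown here):

    // Sketch only (hypothetical helper): configure once, then create per-partition adapters.
    static IDatasourceAdapter openHiveAdapter(IHyracksTaskContext ctx, int partition, ARecordType outputType)
            throws Exception {
        Map<String, String> conf = new HashMap<String, String>();
        conf.put(HiveAdapterFactory.HIVE_DATABASE, "default");            // example values
        conf.put(HiveAdapterFactory.HIVE_TABLE, "tweets");
        conf.put(HiveAdapterFactory.HIVE_WAREHOUSE_DIR, "/user/hive/warehouse");
        // depending on the elided translation, HDFS keys such as HDFSAdapterFactory.KEY_INPUT_FORMAT may also be required
        HiveAdapterFactory factory = new HiveAdapterFactory();
        factory.configure(conf, outputType);              // also configures the wrapped HDFSAdapterFactory
        return factory.createAdapter(ctx, partition);     // a HiveAdapter composed over an HDFSAdapter
    }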
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PullBasedTwitterAdapterFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PullBasedTwitterAdapterFactory.java
index 81b73e7..6058bd2 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PullBasedTwitterAdapterFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/adapter/factory/PullBasedTwitterAdapterFactory.java
@@ -36,11 +36,24 @@
     public static final String PULL_BASED_TWITTER_ADAPTER_NAME = "pull_twitter";
 
     private Map<String, String> configuration;
-    private static ARecordType recordType;
+    private static ARecordType recordType = initOutputType();
+
+    private static ARecordType initOutputType() {
+        ARecordType recordType = null;
+        String[] fieldNames = { "id", "username", "location", "text", "timestamp" };
+        IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING,
+                BuiltinType.ASTRING };
+        try {
+            recordType = new ARecordType("TweetType", fieldNames, fieldTypes, false);
+        } catch (Exception e) {
+            throw new IllegalStateException("Unable to create adapter output type");
+        }
+        return recordType;
+    }
 
     @Override
     public IDatasourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws Exception {
-        return new PullBasedTwitterAdapter(configuration, ctx);
+        return new PullBasedTwitterAdapter(configuration, recordType, ctx);
     }
 
     @Override
@@ -61,16 +74,6 @@
     @Override
     public void configure(Map<String, String> configuration) throws Exception {
         this.configuration = configuration;
-        if (recordType != null) {
-            String[] fieldNames = { "id", "username", "location", "text", "timestamp" };
-            IAType[] fieldTypes = { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.ASTRING,
-                    BuiltinType.ASTRING };
-            try {
-                recordType = new ARecordType("FeedRecordType", fieldNames, fieldTypes, false);
-            } catch (Exception e) {
-                throw new IllegalStateException("Unable to create adapter output type");
-            }
-        }
     }
 
     @Override
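The lazy initialization removed above was guarded by `if (recordType != null)`, which can never hold on a fresh factory, so the adapter output type was effectively never built; constructing it eagerly in a static initializer and passing it explicitly into PullBasedTwitterAdapter removes that ordering hazard. Note also that the type name changes from "FeedRecordType" to "TweetType".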
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/CNNFeedAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/CNNFeedAdapter.java
deleted file mode 100644
index 017b511..0000000
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/CNNFeedAdapter.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Copyright 2009-2013 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.asterix.external.dataset.adapter;
-
-import java.util.Map;
-
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
-import edu.uci.ics.asterix.metadata.feeds.IDatasourceAdapter;
-import edu.uci.ics.asterix.metadata.feeds.IFeedAdapter;
-import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-
-/**
- * An Adapter that provides the functionality of fetching news feed from CNN service
- * The Adapter provides news feed as ADM records.
- */
-public class CNNFeedAdapter extends RSSFeedAdapter implements IDatasourceAdapter, IFeedAdapter {
-
-    private static final long serialVersionUID = 1L;
-
-    public CNNFeedAdapter(Map<String, String> configuration, ARecordType recordType, IHyracksTaskContext ctx)
-            throws AsterixException {
-        super(configuration, recordType, ctx);
-    }
-
-}
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HDFSAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HDFSAdapter.java
index 1e694e2..1a046a5 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HDFSAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HDFSAdapter.java
@@ -27,7 +27,6 @@
 import org.apache.hadoop.mapred.TextInputFormat;
 
 import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
 import edu.uci.ics.hyracks.algebricks.common.exceptions.NotImplementedException;
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HiveAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HiveAdapter.java
index f4ff44e..6280635 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HiveAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/HiveAdapter.java
@@ -17,11 +17,7 @@
 import java.io.IOException;
 import java.io.InputStream;
 
-import org.apache.hadoop.mapred.InputSplit;
-import org.apache.hadoop.mapred.JobConf;
-
 import edu.uci.ics.asterix.om.types.IAType;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.dataflow.std.file.ITupleParserFactory;
@@ -29,26 +25,16 @@
 /**
  * Provides the functionality of fetching data in form of ADM records from a Hive dataset.
  */
-@SuppressWarnings("deprecation")
 public class HiveAdapter extends FileSystemBasedAdapter {
 
     private static final long serialVersionUID = 1L;
 
-    public static final String HIVE_DATABASE = "database";
-    public static final String HIVE_TABLE = "table";
-    public static final String HIVE_HOME = "hive-home";
-    public static final String HIVE_METASTORE_URI = "metastore-uri";
-    public static final String HIVE_WAREHOUSE_DIR = "warehouse-dir";
-    public static final String HIVE_METASTORE_RAWSTORE_IMPL = "rawstore-impl";
-
     private HDFSAdapter hdfsAdapter;
 
-    public HiveAdapter(IAType atype, String[] readSchedule, boolean[] executed, InputSplit[] inputSplits, JobConf conf,
-            AlgebricksPartitionConstraint clusterLocations, String nodeName, ITupleParserFactory parserFactory,
-            IHyracksTaskContext ctx) throws HyracksDataException {
+    public HiveAdapter(IAType atype, HDFSAdapter hdfsAdapter, ITupleParserFactory parserFactory, IHyracksTaskContext ctx)
+            throws HyracksDataException {
         super(parserFactory, atype, ctx);
-        this.hdfsAdapter = new HDFSAdapter(atype, readSchedule, executed, inputSplits, conf, nodeName, parserFactory,
-                ctx);
+        this.hdfsAdapter = hdfsAdapter;
     }
 
     @Override
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IPullBasedFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IPullBasedFeedClient.java
index ee28c3a..be3a2fd 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IPullBasedFeedClient.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/IPullBasedFeedClient.java
@@ -15,7 +15,6 @@
 package edu.uci.ics.asterix.external.dataset.adapter;
 
 import java.io.DataOutput;
-import java.util.Map;
 
 import edu.uci.ics.asterix.common.exceptions.AsterixException;
 
@@ -28,31 +27,16 @@
     }
 
     /**
-     * Writes the next fetched tuple into the provided instance of DatatOutput.
+     * Writes the next fetched tuple into the provided instance of DataOutput. Invocation of this method blocks until
+     * a new tuple has been written or the specified time has expired.
      * 
      * @param dataOutput
      *            The receiving channel for the feed client to write ADM records to.
-     * @return true if a record was written to the DataOutput instance
-     *         false if no record was written to the DataOutput instance indicating non-availability of new data.
+     * @param timeout
+     *            Threshold time (expressed in seconds) for the next tuple to be obtained from the external source.
+     * @return the state of data inflow: DATA_AVAILABLE if a tuple was written, DATA_NOT_AVAILABLE if the timeout expired, or NO_MORE_DATA if the feed has ended
      * @throws AsterixException
      */
-    public InflowState nextTuple(DataOutput dataOutput) throws AsterixException;
-
-    /**
-     * Provides logic for any corrective action that feed client needs to execute on
-     * encountering an exception.
-     * 
-     * @param e
-     *            The exception encountered during fetching of data from external source
-     * @throws AsterixException
-     */
-    public void resetOnFailure(Exception e) throws AsterixException;
-
-    /**
-     * @param configuration
-     */
-    public boolean alter(Map<String, String> configuration);
-
-    public void stop();
+    public InflowState nextTuple(DataOutput dataOutput, int timeout) throws AsterixException;
 
 }
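The pull-client interface is reduced to a single blocking call: resetOnFailure, alter, and stop disappear, and the caller instead supplies a timeout. A caller-side sketch of the revised contract, mirroring how PullBasedAdapter drives it later in this patch (the class, method, and variable names below are placeholders, not part of the patch):

    import java.io.DataOutput;
    import edu.uci.ics.asterix.common.exceptions.AsterixException;
    import edu.uci.ics.asterix.external.dataset.adapter.IPullBasedFeedClient;
    import edu.uci.ics.asterix.external.dataset.adapter.IPullBasedFeedClient.InflowState;

    public final class FeedClientPollExample {
        // Sketch only: one iteration of a pull loop against the revised contract.
        static InflowState pollOnce(IPullBasedFeedClient client, DataOutput out) throws AsterixException {
            InflowState state = client.nextTuple(out, 5 /* seconds */);
            switch (state) {
                case DATA_AVAILABLE:     // a record was serialized into `out`; append it to the current frame
                    break;
                case DATA_NOT_AVAILABLE: // the timeout elapsed with no data; flush any partial frame and retry
                    break;
                case NO_MORE_DATA:       // the feed has ended; flush and stop ingestion
                    break;
            }
            return state;
        }
    }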
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAdapter.java
index e2a4b76..193cce4 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAdapter.java
@@ -20,9 +20,8 @@
 import java.util.logging.Logger;
 
 import edu.uci.ics.asterix.external.dataset.adapter.IPullBasedFeedClient.InflowState;
-import edu.uci.ics.asterix.metadata.feeds.AbstractFeedDatasourceAdapter;
-import edu.uci.ics.asterix.metadata.feeds.IDatasourceAdapter;
-import edu.uci.ics.asterix.metadata.feeds.IFeedAdapter;
+import edu.uci.ics.asterix.metadata.feeds.FeedPolicyEnforcer;
+import edu.uci.ics.asterix.metadata.feeds.IPullBasedFeedAdapter;
 import edu.uci.ics.asterix.om.types.ARecordType;
 import edu.uci.ics.hyracks.api.comm.IFrameWriter;
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
@@ -36,11 +35,11 @@
  * the common logic for obtaining bytes from an external source and packing them
  * into frames as tuples.
  */
-public abstract class PullBasedAdapter extends AbstractFeedDatasourceAdapter implements IDatasourceAdapter,
-        IFeedAdapter {
+public abstract class PullBasedAdapter implements IPullBasedFeedAdapter {
 
     private static final long serialVersionUID = 1L;
     private static final Logger LOGGER = Logger.getLogger(PullBasedAdapter.class.getName());
+    private static final int timeout = 5; // seconds
 
     protected ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(1);
     protected IPullBasedFeedClient pullBasedFeedClient;
@@ -52,6 +51,17 @@
     private ByteBuffer frame;
     private long tupleCount = 0;
     private final IHyracksTaskContext ctx;
+    private int frameTupleCount = 0;
+
+    protected FeedPolicyEnforcer policyEnforcer;
+
+    public FeedPolicyEnforcer getPolicyEnforcer() {
+        return policyEnforcer;
+    }
+
+    public void setFeedPolicyEnforcer(FeedPolicyEnforcer policyEnforcer) {
+        this.policyEnforcer = policyEnforcer;
+    }
 
     public abstract IPullBasedFeedClient getFeedClient(int partition) throws Exception;
 
@@ -72,24 +82,36 @@
 
         pullBasedFeedClient = getFeedClient(partition);
         InflowState inflowState = null;
+
         while (continueIngestion) {
             tupleBuilder.reset();
             try {
-                inflowState = pullBasedFeedClient.nextTuple(tupleBuilder.getDataOutput());
+                // blocking call
+                inflowState = pullBasedFeedClient.nextTuple(tupleBuilder.getDataOutput(), timeout);
                 switch (inflowState) {
                     case DATA_AVAILABLE:
                         tupleBuilder.addFieldEndOffset();
                         appendTupleToFrame(writer);
-                        tupleCount++;
+                        frameTupleCount++;
                         break;
                     case NO_MORE_DATA:
                         if (LOGGER.isLoggable(Level.INFO)) {
                             LOGGER.info("Reached end of feed");
                         }
                         FrameUtils.flushFrame(frame, writer);
+                        tupleCount += frameTupleCount;
+                        frameTupleCount = 0;
                         continueIngestion = false;
                         break;
                     case DATA_NOT_AVAILABLE:
+                        if (frameTupleCount > 0) {
+                            FrameUtils.flushFrame(frame, writer);
+                            tupleCount += frameTupleCount;
+                            frameTupleCount = 0;
+                        }
+                        if (LOGGER.isLoggable(Level.WARNING)) {
+                            LOGGER.warning("Timed out on obtaining data from pull based adaptor. Trying again!");
+                        }
                         break;
                 }
 
@@ -98,7 +120,6 @@
                     failureException.printStackTrace();
                     boolean continueIngestion = policyEnforcer.continueIngestionPostSoftwareFailure(failureException);
                     if (continueIngestion) {
-                        pullBasedFeedClient.resetOnFailure(failureException);
                         tupleBuilder.reset();
                         continue;
                     } else {
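PullBasedAdapter now tracks a per-frame tuple count and flushes the partially filled frame whenever the client reports DATA_NOT_AVAILABLE (i.e., the timeout elapsed), so already-ingested records become visible downstream instead of waiting for the frame to fill; the global tuple count is advanced only at flush time, and the failure path no longer calls the removed resetOnFailure hook.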
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAzureFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAzureFeedClient.java
index 95a527a..60143cd 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAzureFeedClient.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAzureFeedClient.java
@@ -6,7 +6,6 @@
 import java.util.Arrays;
 import java.util.Iterator;
 import java.util.List;
-import java.util.Map;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import java.util.regex.Matcher;
@@ -79,21 +78,7 @@
     }
 
     @Override
-    public void resetOnFailure(Exception e) throws AsterixException {
-        e.printStackTrace();
-    }
-
-    @Override
-    public boolean alter(Map<String, String> configuration) {
-        return false;
-    }
-
-    @Override
-    public void stop() {
-    }
-
-    @Override
-    public InflowState nextTuple(DataOutput dataOutput) throws AsterixException {
+    public InflowState nextTuple(DataOutput dataOutput, int timeout) throws AsterixException {
         if (entityIt == null) {
             entityIt = ctc.execute(tableQuery).iterator();
         }
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAzureTwitterAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAzureTwitterAdapter.java
index 5cdd55a..a2f41b9 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAzureTwitterAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedAzureTwitterAdapter.java
@@ -80,4 +80,9 @@
         }
         return new PullBasedAzureFeedClient(csa, outputType, tableName, null, null);
     }
+
+    @Override
+    public DataExchangeMode getDataExchangeMode() {
+        return DataExchangeMode.PULL;
+    }
 }
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedFeedClient.java
index 8efe919..e728787 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedFeedClient.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedFeedClient.java
@@ -16,6 +16,8 @@
 
 import java.io.DataOutput;
 import java.io.IOException;
+import java.util.logging.Level;
+import java.util.logging.Logger;
 
 import edu.uci.ics.asterix.builders.IARecordBuilder;
 import edu.uci.ics.asterix.builders.RecordBuilder;
@@ -45,6 +47,8 @@
 
 public abstract class PullBasedFeedClient implements IPullBasedFeedClient {
 
+    protected static final Logger LOGGER = Logger.getLogger(PullBasedFeedClient.class.getName());
+
     protected ARecordSerializerDeserializer recordSerDe;
     protected AMutableRecord mutableRecord;
     protected boolean messageReceived;
@@ -69,28 +73,36 @@
     public abstract InflowState setNextRecord() throws Exception;
 
     @Override
-    public InflowState nextTuple(DataOutput dataOutput) throws AsterixException {
+    public InflowState nextTuple(DataOutput dataOutput, int timeout) throws AsterixException {
         try {
-            System.out.println("Setting next record");
-            InflowState state = setNextRecord();
-            boolean first = true;
-            switch (state) {
-                case DATA_AVAILABLE:
-                    IAType t = mutableRecord.getType();
-                    ATypeTag tag = t.getTypeTag();
-                    dataOutput.writeByte(tag.serialize());
-                    if (first) {
+            InflowState state = null;
+            int waitCount = 0;
+            boolean continueWait = true;
+            while ((state == null || state.equals(InflowState.DATA_NOT_AVAILABLE)) && continueWait) {
+                state = setNextRecord();
+                switch (state) {
+                    case DATA_AVAILABLE:
+                        IAType t = mutableRecord.getType();
+                        ATypeTag tag = t.getTypeTag();
+                        dataOutput.writeByte(tag.serialize());
                         recordBuilder.reset(mutableRecord.getType());
-                        first = false;
-                    }
-                    recordBuilder.init();
-                    writeRecord(mutableRecord, dataOutput, recordBuilder);
-                    break;
-
-                case DATA_NOT_AVAILABLE:
-                    break;
-                case NO_MORE_DATA:
-                    break;
+                        recordBuilder.init();
+                        writeRecord(mutableRecord, dataOutput, recordBuilder);
+                        break;
+                    case DATA_NOT_AVAILABLE:
+                        if (waitCount > timeout) {
+                            continueWait = false;
+                        } else {
+                            if (LOGGER.isLoggable(Level.WARNING)) {
+                                LOGGER.warning("Waiting to obtaing data from pull based adaptor");
+                            }
+                            Thread.sleep(1000);
+                            waitCount++;
+                        }
+                        break;
+                    case NO_MORE_DATA:
+                        break;
+                }
             }
             return state;
         } catch (Exception e) {
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterAdapter.java
index 019d1b7..838cfeb 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterAdapter.java
@@ -33,7 +33,6 @@
     public static final String INTERVAL = "interval";
 
     private ARecordType recordType;
-    private final IHyracksTaskContext ctx;
     private PullBasedTwitterFeedClient tweetClient;
 
     @Override
@@ -41,19 +40,18 @@
         return tweetClient;
     }
 
-    public PullBasedTwitterAdapter(Map<String, String> configuration, IHyracksTaskContext ctx) throws AsterixException {
+    public PullBasedTwitterAdapter(Map<String, String> configuration, ARecordType recordType, IHyracksTaskContext ctx) throws AsterixException {
         super(configuration, ctx);
-        this.ctx = ctx;
-        tweetClient = new PullBasedTwitterFeedClient(ctx, this);
-    }
-
-    @Override
-    public void stop() {
-        tweetClient.stop();
+        tweetClient = new PullBasedTwitterFeedClient(ctx, recordType, this);
     }
 
     public ARecordType getAdapterOutputType() {
         return recordType;
     }
 
+    @Override
+    public DataExchangeMode getDataExchangeMode() {
+        return DataExchangeMode.PULL;
+    }
+
 }
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterFeedClient.java
index 7a5aeea..2c8d659 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterFeedClient.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/PullBasedTwitterFeedClient.java
@@ -17,7 +17,6 @@
 import java.util.List;
 import java.util.Map;
 import java.util.UUID;
-import java.util.logging.Logger;
 
 import twitter4j.Query;
 import twitter4j.QueryResult;
@@ -25,7 +24,6 @@
 import twitter4j.Twitter;
 import twitter4j.TwitterException;
 import twitter4j.TwitterFactory;
-import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.dataflow.data.nontagged.serde.ARecordSerializerDeserializer;
 import edu.uci.ics.asterix.om.base.AMutableRecord;
 import edu.uci.ics.asterix.om.base.AMutableString;
@@ -42,7 +40,6 @@
 
     private String keywords;
     private Query query;
-    private String id_prefix;
     private Twitter twitter;
     private int requestInterval = 10; // seconds
     private QueryResult result;
@@ -52,14 +49,11 @@
     private ARecordType recordType;
     private int nextTweetIndex = 0;
 
-    private static final Logger LOGGER = Logger.getLogger(PullBasedTwitterFeedClient.class.getName());
-
-    public PullBasedTwitterFeedClient(IHyracksTaskContext ctx, PullBasedTwitterAdapter adapter) {
-        this.id_prefix = ctx.getJobletContext().getApplicationContext().getNodeId();
+    public PullBasedTwitterFeedClient(IHyracksTaskContext ctx, ARecordType recordType, PullBasedTwitterAdapter adapter) {
         twitter = new TwitterFactory().getInstance();
         mutableFields = new IAObject[] { new AMutableString(null), new AMutableString(null), new AMutableString(null),
                 new AMutableString(null), new AMutableString(null) };
-        recordType = adapter.getAdapterOutputType();
+        this.recordType = recordType;
         recordSerDe = new ARecordSerializerDeserializer(recordType);
         mutableRecord = new AMutableRecord(recordType, mutableFields);
         tupleFieldValues = new String[recordType.getFieldNames().length];
@@ -94,22 +88,6 @@
         return InflowState.DATA_AVAILABLE;
     }
 
-    @Override
-    public void resetOnFailure(Exception e) throws AsterixException {
-        // TOOO: implement resetting logic for Twitter
-    }
-
-    @Override
-    public boolean alter(Map<String, String> configuration) {
-        // TODO Auto-generated method stub
-        return false;
-    }
-
-    @Override
-    public void stop() {
-        // TODO Auto-generated method stub
-    }
-
     private void initialize(Map<String, String> params) {
         this.keywords = (String) params.get(PullBasedTwitterAdapter.QUERY);
         this.requestInterval = Integer.parseInt((String) params.get(PullBasedTwitterAdapter.INTERVAL));
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedAdapter.java
index 6ace5c5..4eea034 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedAdapter.java
@@ -30,16 +30,14 @@
 
     private static final long serialVersionUID = 1L;
 
+    private static final String KEY_RSS_URL = "rss_url";
+
     private List<String> feedURLs = new ArrayList<String>();
-    private boolean isStopRequested = false;
     private String id_prefix = "";
 
     private IPullBasedFeedClient rssFeedClient;
-    private ARecordType recordType;
 
-    public boolean isStopRequested() {
-        return isStopRequested;
-    }
+    private ARecordType recordType;
 
     public RSSFeedAdapter(Map<String, String> configuration, ARecordType recordType, IHyracksTaskContext ctx)
             throws AsterixException {
@@ -48,15 +46,6 @@
         this.recordType = recordType;
     }
 
-    public void setStopRequested(boolean isStopRequested) {
-        this.isStopRequested = isStopRequested;
-    }
-
-    @Override
-    public void stop() {
-        isStopRequested = true;
-    }
-
     private void initializeFeedURLs(String rssURLProperty) {
         feedURLs.clear();
         String[] feedURLProperty = rssURLProperty.split(",");
@@ -66,7 +55,7 @@
     }
 
     protected void reconfigure(Map<String, String> arguments) {
-        String rssURLProperty = configuration.get("KEY_RSS_URL");
+        String rssURLProperty = configuration.get(KEY_RSS_URL);
         if (rssURLProperty != null) {
             initializeFeedURLs(rssURLProperty);
         }
@@ -84,4 +73,9 @@
         return recordType;
     }
 
+    @Override
+    public DataExchangeMode getDataExchangeMode() {
+        return DataExchangeMode.PULL;
+    }
+
 }
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedClient.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedClient.java
index 8a4b301..41ed923 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedClient.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/RSSFeedClient.java
@@ -18,7 +18,6 @@
 import java.net.URL;
 import java.util.LinkedList;
 import java.util.List;
-import java.util.Map;
 import java.util.Queue;
 
 import com.sun.syndication.feed.synd.SyndEntryImpl;
@@ -42,7 +41,6 @@
 @SuppressWarnings("rawtypes")
 public class RSSFeedClient extends PullBasedFeedClient {
 
-    private final String feedURL;
     private long id = 0;
     private String idPrefix;
     private boolean feedModified = false;
@@ -67,9 +65,8 @@
     }
 
     public RSSFeedClient(RSSFeedAdapter adapter, String feedURL, String id_prefix) throws MalformedURLException {
-        this.feedURL = feedURL;
         this.idPrefix = id_prefix;
-        feedUrl = new URL(feedURL);
+        this.feedUrl = new URL(feedURL);
         feedInfoCache = HashMapFeedInfoCache.getInstance();
         fetcher = new HttpURLFeedFetcher(feedInfoCache);
         listener = new FetcherEventListenerImpl(this);
@@ -114,7 +111,6 @@
     @SuppressWarnings("unchecked")
     private void fetchFeed() {
         try {
-            System.err.println("Retrieving feed " + feedURL);
             // Retrieve the feed.
             // We will get a Feed Polled Event and then a
             // Feed Retrieved event (assuming the feed is valid)
@@ -133,24 +129,6 @@
         }
     }
 
-    @Override
-    public void resetOnFailure(Exception e) {
-        // TODO Auto-generated method stub
-
-    }
-
-    @Override
-    public boolean alter(Map<String, String> configuration) {
-        // TODO Auto-generated method stub
-        return false;
-    }
-
-    @Override
-    public void stop() {
-        // TODO Auto-generated method stub
-
-    }
-
 }
 
 class FetcherEventListenerImpl implements FetcherListener {
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/StreamBasedAdapter.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/StreamBasedAdapter.java
index f89a7ff..f09a841 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/StreamBasedAdapter.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/dataset/adapter/StreamBasedAdapter.java
@@ -2,8 +2,9 @@
 
 import java.io.IOException;
 import java.io.InputStream;
+import java.util.logging.Level;
+import java.util.logging.Logger;
 
-import edu.uci.ics.asterix.metadata.feeds.AdapterRuntimeManager;
 import edu.uci.ics.asterix.metadata.feeds.IDatasourceAdapter;
 import edu.uci.ics.asterix.om.types.IAType;
 import edu.uci.ics.hyracks.api.comm.IFrameWriter;
@@ -16,14 +17,13 @@
 
     private static final long serialVersionUID = 1L;
 
-    public static final String NODE_RESOLVER_FACTORY_PROPERTY = "node.Resolver";
+    protected static final Logger LOGGER = Logger.getLogger(StreamBasedAdapter.class.getName());
 
     public abstract InputStream getInputStream(int partition) throws IOException;
 
     protected final ITupleParser tupleParser;
+
     protected final IAType sourceDatatype;
-    protected IHyracksTaskContext ctx;
-    protected AdapterRuntimeManager runtimeManager;
 
     public StreamBasedAdapter(ITupleParserFactory parserFactory, IAType sourceDatatype, IHyracksTaskContext ctx)
             throws HyracksDataException {
@@ -34,7 +34,13 @@
     @Override
     public void start(int partition, IFrameWriter writer) throws Exception {
         InputStream in = getInputStream(partition);
-        tupleParser.parse(in, writer);
+        if (in != null) {
+            tupleParser.parse(in, writer);
+        } else {
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Could not obtain input stream for parsing from adaptor " + this + "[" + partition + "]");
+            }
+        }
     }
 
 }
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/ExternalFunctionProvider.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/ExternalFunctionProvider.java
index d8cd823..fc629ea 100755
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/ExternalFunctionProvider.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/ExternalFunctionProvider.java
@@ -63,6 +63,7 @@
             setArguments(tuple);
             evaluate(functionHelper);
         } catch (Exception e) {
+            e.printStackTrace();
             throw new AlgebricksException(e);
         }
     }
diff --git a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/JTypeObjectFactory.java b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/JTypeObjectFactory.java
index a4d0f59..3c5ddfd 100644
--- a/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/JTypeObjectFactory.java
+++ b/asterix-external-data/src/main/java/edu/uci/ics/asterix/external/library/JTypeObjectFactory.java
@@ -15,6 +15,7 @@
 package edu.uci.ics.asterix.external.library;
 
 import java.util.ArrayList;
+import java.util.List;
 
 import edu.uci.ics.asterix.external.library.java.IJObject;
 import edu.uci.ics.asterix.external.library.java.JObjects.JBoolean;
@@ -39,6 +40,8 @@
 import edu.uci.ics.asterix.external.library.java.JObjects.JUnorderedList;
 import edu.uci.ics.asterix.om.types.AOrderedListType;
 import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.ATypeTag;
+import edu.uci.ics.asterix.om.types.AUnionType;
 import edu.uci.ics.asterix.om.types.AUnorderedListType;
 import edu.uci.ics.asterix.om.types.IAType;
 import edu.uci.ics.asterix.om.util.container.IObjectFactory;
@@ -121,8 +124,18 @@
                     index++;
                 }
                 retValue = new JRecord((ARecordType) type, fieldObjects);
-
                 break;
+            case UNION:
+                AUnionType unionType = (AUnionType) type;
+                List<IAType> unionList = unionType.getUnionList();
+                IJObject itemObject = null;
+                for (IAType elementType : unionList) {
+                    if (!elementType.getTypeTag().equals(ATypeTag.NULL)) {
+                        itemObject = create(elementType);
+                        break;
+                    }
+                }
+                retValue = itemObject;
+                break;
         }
         return retValue;
     }
diff --git a/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/ParseTweetFunction.java b/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/ParseTweetFunction.java
index de13e59..1046518 100644
--- a/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/ParseTweetFunction.java
+++ b/asterix-external-data/src/test/java/edu/uci/ics/asterix/external/library/ParseTweetFunction.java
@@ -14,18 +14,18 @@
  */
 package edu.uci.ics.asterix.external.library;
 
-import edu.uci.ics.asterix.external.library.java.JObjects.JOrderedList;
 import edu.uci.ics.asterix.external.library.java.JObjects.JRecord;
 import edu.uci.ics.asterix.external.library.java.JObjects.JString;
+import edu.uci.ics.asterix.external.library.java.JObjects.JUnorderedList;
 import edu.uci.ics.asterix.external.library.java.JTypeTag;
 
 public class ParseTweetFunction implements IExternalScalarFunction {
 
-    private JOrderedList list = null;
+    private JUnorderedList list = null;
 
     @Override
     public void initialize(IFunctionHelper functionHelper) {
-        list = new JOrderedList(functionHelper.getObject(JTypeTag.STRING));
+        list = new JUnorderedList(functionHelper.getObject(JTypeTag.STRING));
     }
 
     @Override
diff --git a/asterix-external-data/src/test/resources/text_functions.xml b/asterix-external-data/src/test/resources/text_functions.xml
index fef51aa..bd485a4 100644
--- a/asterix-external-data/src/test/resources/text_functions.xml
+++ b/asterix-external-data/src/test/resources/text_functions.xml
@@ -4,8 +4,8 @@
 		<libraryFunction>
 			<function_type>SCALAR</function_type>
 			<name>parseTweet</name>
-			<arguments>TweetType</arguments>
-			<return_type>TweetType</return_type>
+			<arguments>TweetInputType</arguments>
+			<return_type>TweetOutputType</return_type>
 			<definition>edu.uci.ics.asterix.external.library.ParseTweetFactory
 			</definition>
 		</libraryFunction>
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/CreateCommand.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/CreateCommand.java
index 63d2f33..5b024ec 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/CreateCommand.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/command/CreateCommand.java
@@ -16,9 +16,6 @@
 
 import java.io.File;
 
-import javax.xml.bind.JAXBContext;
-import javax.xml.bind.Unmarshaller;
-
 import org.kohsuke.args4j.Option;
 
 import edu.uci.ics.asterix.common.configuration.AsterixConfiguration;
diff --git a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/InstallerDriver.java b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/InstallerDriver.java
index 53ec136..8461518 100644
--- a/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/InstallerDriver.java
+++ b/asterix-installer/src/main/java/edu/uci/ics/asterix/installer/driver/InstallerDriver.java
@@ -24,7 +24,6 @@
 import edu.uci.ics.asterix.event.service.AsterixEventService;
 import edu.uci.ics.asterix.event.service.ILookupService;
 import edu.uci.ics.asterix.event.service.ServiceProvider;
-import edu.uci.ics.asterix.event.util.PatternCreator;
 import edu.uci.ics.asterix.installer.command.CommandHandler;
 import edu.uci.ics.asterix.installer.schema.conf.Configuration;
 
diff --git a/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/transaction/RecoveryIT.java b/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/transaction/RecoveryIT.java
index 2013c72..39bd0a2 100644
--- a/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/transaction/RecoveryIT.java
+++ b/asterix-installer/src/test/java/edu/uci/ics/asterix/installer/transaction/RecoveryIT.java
@@ -26,6 +26,7 @@
 
 import org.apache.commons.io.FileUtils;
 import org.junit.AfterClass;
+import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.runner.RunWith;
@@ -71,15 +72,6 @@
         managixHomePath = new File(installerTargetPath, managixHomeDirName).getAbsolutePath();
         LOGGER.info("MANAGIX_HOME=" + managixHomePath);
 
-        String fileListPath = asterixInstallerPath.getAbsolutePath() + File.separator + "src" + File.separator + "test"
-                + File.separator + "resources" + File.separator + "transactionts" + File.separator + "data"
-                + File.separator + "file_list.txt";
-        String srcBasePath = asterixAppPath.getAbsolutePath();
-        String destBasePath = managixHomePath + File.separator + "clusters" + File.separator + "local" + File.separator
-                + "working_dir";
-        LOGGER.info("working dir: " + destBasePath);
-        prepareDataFiles(fileListPath, srcBasePath, destBasePath);
-
         pb = new ProcessBuilder();
         env = pb.environment();
         env.put("MANAGIX_HOME", managixHomePath);
@@ -93,24 +85,13 @@
                 + "stop_and_delete.sh");
     }
 
-    private static void prepareDataFiles(String fileListPath, String srcBasePath, String destBasePath)
-            throws IOException {
-        String line;
-        File srcPathFile = null;
-        File destPathFile = null;
-        BufferedReader br = new BufferedReader(new FileReader(fileListPath));
-        while ((line = br.readLine()) != null) {
-            srcPathFile = new File(srcBasePath + File.separator + line.trim());
-            destPathFile = new File(destBasePath + File.separator + line.trim());
-            destPathFile.getParentFile().mkdirs();
-            FileUtils.copyFile(srcPathFile, destPathFile);
-        }
-    }
-
     @AfterClass
     public static void tearDown() throws Exception {
         File outdir = new File(PATH_ACTUAL);
         FileUtils.deleteDirectory(outdir);
+        File dataCopyDir = new File(managixHomePath + File.separator + ".." + File.separator + ".." + File.separator
+                + "data");
+        FileUtils.deleteDirectory(dataCopyDir);
         TestsUtils.executeScript(pb, scriptHomePath + File.separator + "setup_teardown" + File.separator
                 + "stop_and_delete.sh");
         TestsUtils.executeScript(pb, scriptHomePath + File.separator + "setup_teardown" + File.separator
@@ -135,4 +116,5 @@
     public void test() throws Exception {
         TestsUtils.executeTest(PATH_ACTUAL, tcCtx, pb);
     }
+
 }
diff --git a/asterix-installer/src/test/resources/integrationts/library/queries/library-feeds/ingest_feed/feed_ingest.1.ddl.aql b/asterix-installer/src/test/resources/integrationts/library/queries/library-feeds/feed_ingest/feed_ingest.1.ddl.aql
similarity index 64%
rename from asterix-installer/src/test/resources/integrationts/library/queries/library-feeds/ingest_feed/feed_ingest.1.ddl.aql
rename to asterix-installer/src/test/resources/integrationts/library/queries/library-feeds/feed_ingest/feed_ingest.1.ddl.aql
index 4221594..374e1b3 100644
--- a/asterix-installer/src/test/resources/integrationts/library/queries/library-feeds/ingest_feed/feed_ingest.1.ddl.aql
+++ b/asterix-installer/src/test/resources/integrationts/library/queries/library-feeds/feed_ingest/feed_ingest.1.ddl.aql
@@ -9,19 +9,27 @@
  */
 use dataverse externallibtest;
 
-create type TweetType as closed {
+create type TweetInputType as closed {
+  id: string,
+  username : string,
+  location : string,
+  text : string,
+  timestamp : string
+}
+
+create type TweetOutputType as closed {
   id: string,
   username : string,
   location : string,
   text : string,
   timestamp : string,
-  topics : {{string}}?
+  topics : {{string}}
 }
 
 create feed TweetFeed
 using "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory"
-(("output-type-name"="TweetType"),("fs"="localfs"),("path"="nc1://data/twitter/obamatweets.adm"),("format"="adm"),("tuple-interval"="10"))
+(("type-name"="TweetInputType"),("fs"="localfs"),("path"="127.0.0.1://../../../../../../asterix-app/data/twitter/obamatweets.adm"),("format"="adm"),("tuple-interval"="10"))
 apply function testlib#parseTweet;
 
-create dataset Tweets(TweetType)
+create dataset Tweets(TweetOutputType)
 primary key id;
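Besides the ingest_feed to feed_ingest rename, the single TweetType is split into TweetInputType (what the file-based adapter emits) and TweetOutputType (what testlib#parseTweet returns, with a now-mandatory topics field); this lines up with the parseTweet argument/return types updated in text_functions.xml above and with the functionDataset metadata result below, and the Tweets dataset is keyed on the output type. The adapter property is likewise renamed from "output-type-name" to "type-name".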
diff --git a/asterix-installer/src/test/resources/integrationts/library/queries/library-feeds/ingest_feed/feed_ingest.2.update.aql b/asterix-installer/src/test/resources/integrationts/library/queries/library-feeds/feed_ingest/feed_ingest.2.update.aql
similarity index 93%
rename from asterix-installer/src/test/resources/integrationts/library/queries/library-feeds/ingest_feed/feed_ingest.2.update.aql
rename to asterix-installer/src/test/resources/integrationts/library/queries/library-feeds/feed_ingest/feed_ingest.2.update.aql
index 028ac59..8dfa98d 100644
--- a/asterix-installer/src/test/resources/integrationts/library/queries/library-feeds/ingest_feed/feed_ingest.2.update.aql
+++ b/asterix-installer/src/test/resources/integrationts/library/queries/library-feeds/feed_ingest/feed_ingest.2.update.aql
@@ -9,4 +9,6 @@
  */
 use dataverse externallibtest;
 
+set wait-for-completion-feed "true";
+
 connect feed TweetFeed to dataset Tweets;
diff --git a/asterix-installer/src/test/resources/integrationts/library/queries/library-feeds/ingest_feed/feed_ingest.3.query.aql b/asterix-installer/src/test/resources/integrationts/library/queries/library-feeds/feed_ingest/feed_ingest.3.query.aql
similarity index 100%
rename from asterix-installer/src/test/resources/integrationts/library/queries/library-feeds/ingest_feed/feed_ingest.3.query.aql
rename to asterix-installer/src/test/resources/integrationts/library/queries/library-feeds/feed_ingest/feed_ingest.3.query.aql
diff --git a/asterix-installer/src/test/resources/integrationts/library/results/library-feeds/feed_ingest/feed_ingest.1.adm b/asterix-installer/src/test/resources/integrationts/library/results/library-feeds/feed_ingest/feed_ingest.1.adm
new file mode 100644
index 0000000..1291213
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/results/library-feeds/feed_ingest/feed_ingest.1.adm
@@ -0,0 +1,12 @@
+{ "id": "nc1:1", "username": "BronsonMike", "location": "", "text": "@GottaLaff @reutersus Christie and obama just foul weather friends", "timestamp": "Thu Dec 06 16:53:06 PST 2012", "topics": {{  }} }
+{ "id": "nc1:100", "username": "KidrauhlProuds", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson  uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:16 PST 2012", "topics": {{  }} }
+{ "id": "nc1:102", "username": "jaysauce82", "location": "", "text": "Not voting for President Obama #BadDecision", "timestamp": "Thu Dec 06 16:53:16 PST 2012", "topics": {{ "#BadDecision" }} }
+{ "id": "nc1:104", "username": "princeofsupras", "location": "", "text": "RT @01Direclieber: A filha do Michael Jackson e uma Belieber,a filha do Eminem e uma Belieber,as filhas de Obama sao Beliebers, e a filha do meu pai e Belieber", "timestamp": "Thu Dec 06 16:53:15 PST 2012", "topics": {{  }} }
+{ "id": "nc1:106", "username": "GulfDogs", "location": "", "text": "Obama Admin Knew Libyan Terrorists Had US-Provided Weaponsteaparty #tcot #ccot #NewGuards #BreitbartArmy #patriotwttp://t.co/vJxzrQUE", "timestamp": "Thu Dec 06 16:53:14 PST 2012", "topics": {{ "#tcot", "#ccot", "#NewGuards", "#BreitbartArmy", "#patriotwttp://t.co/vJxzrQUE" }} }
+{ "id": "nc1:108", "username": "Laugzpz", "location": "", "text": "@AlfredoJalife Maestro Obama se hace de la vista gorda, es un acuerdo de siempre creo yo.", "timestamp": "Thu Dec 06 16:53:14 PST 2012", "topics": {{  }} }
+{ "id": "nc1:11", "username": "magarika", "location": "", "text": "RT @ken24xavier: Obama tells SOROS - our plan is ALMOST finished http://t.co/WvzK0GtU", "timestamp": "Thu Dec 06 16:53:05 PST 2012", "topics": {{  }} }
+{ "id": "nc1:111", "username": "ToucanMall", "location": "", "text": "RT @WorldWar3Watch: Michelle Obama Gets More Grammy Nominations Than Justin ...  #Obama #WW3 http://t.co/0Wv2GKij", "timestamp": "Thu Dec 06 16:53:13 PST 2012", "topics": {{ "#Obama", "#WW3" }} }
+{ "id": "nc1:113", "username": "ToucanMall", "location": "", "text": "RT @ObamaPalooza: Tiffany Shared What $2,000 Meant to Her ... and the President Stopped by to Talk About It http://t.co/sgT7lsNV #Obama", "timestamp": "Thu Dec 06 16:53:12 PST 2012", "topics": {{ "#Obama" }} }
+{ "id": "nc1:115", "username": "thewildpitch", "location": "", "text": "RT @RevkahJC: Dennis Miller: Obama Should Just Say He Wants To Tax Successful People http://t.co/Ihlemy9Y", "timestamp": "Thu Dec 06 16:53:11 PST 2012", "topics": {{  }} }
+{ "id": "nc1:117", "username": "Rnugent24", "location": "", "text": "RT @ConservativeQuo: unemployment is above 8% again. I wonder how long it will take for Obama to start blaming Bush? 3-2-1 #tcot #antiobama", "timestamp": "Thu Dec 06 16:53:10 PST 2012", "topics": {{ "#tcot", "#antiobama" }} }
+{ "id": "nc1:119", "username": "ToucanMall", "location": "", "text": "RT @Newitrsdotcom: I hope #Obama will win re-election... Other four years without meaningless #wars", "timestamp": "Thu Dec 06 16:53:09 PST 2012", "topics": {{ "#Obama", "#wars" }} }
diff --git a/asterix-installer/src/test/resources/integrationts/library/results/library-metadata/functionDataset/functionDataset.1.adm b/asterix-installer/src/test/resources/integrationts/library/results/library-metadata/functionDataset/functionDataset.1.adm
index bb42e62..4a8369b 100644
--- a/asterix-installer/src/test/resources/integrationts/library/results/library-metadata/functionDataset/functionDataset.1.adm
+++ b/asterix-installer/src/test/resources/integrationts/library/results/library-metadata/functionDataset/functionDataset.1.adm
@@ -6,7 +6,7 @@
 			", "Language": "JAVA", "Kind": "SCALAR" }
 { "DataverseName": "externallibtest", "Name": "testlib#mysum", "Arity": "2", "Params": [ "AINT32", "AINT32" ], "ReturnType": "AINT32", "Definition": "edu.uci.ics.asterix.external.library.SumFactory
 			", "Language": "JAVA", "Kind": "SCALAR" }
-{ "DataverseName": "externallibtest", "Name": "testlib#parseTweet", "Arity": "1", "Params": [ "TweetType" ], "ReturnType": "TweetType", "Definition": "edu.uci.ics.asterix.external.library.ParseTweetFactory
+{ "DataverseName": "externallibtest", "Name": "testlib#parseTweet", "Arity": "1", "Params": [ "TweetInputType" ], "ReturnType": "TweetOutputType", "Definition": "edu.uci.ics.asterix.external.library.ParseTweetFactory
 			", "Language": "JAVA", "Kind": "SCALAR" }
 { "DataverseName": "externallibtest", "Name": "testlib#toUpper", "Arity": "1", "Params": [ "TextType" ], "ReturnType": "TextType", "Definition": "edu.uci.ics.asterix.external.library.UpperCaseFactory
 			", "Language": "JAVA", "Kind": "SCALAR" }
diff --git a/asterix-installer/src/test/resources/integrationts/library/testsuite.xml b/asterix-installer/src/test/resources/integrationts/library/testsuite.xml
index f97c050..65780d9 100644
--- a/asterix-installer/src/test/resources/integrationts/library/testsuite.xml
+++ b/asterix-installer/src/test/resources/integrationts/library/testsuite.xml
@@ -38,5 +38,12 @@
       </compilation-unit>
     </test-case>
   </test-group>
+  <test-group name="library-feeds">
+    <test-case FilePath="library-feeds">
+      <compilation-unit name="feed_ingest">
+        <output-dir compare="Text">feed_ingest</output-dir>
+      </compilation-unit>
+    </test-case>
+  </test-group>
 </test-suite>
 
diff --git a/asterix-installer/src/test/resources/transactionts/data/file_list.txt b/asterix-installer/src/test/resources/transactionts/data/file_list.txt
deleted file mode 100644
index 4832ad0..0000000
--- a/asterix-installer/src/test/resources/transactionts/data/file_list.txt
+++ /dev/null
@@ -1 +0,0 @@
-data/csv/fragile_01.csv
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_index_only/primary_index_only.3.update.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_index_only/primary_index_only.3.update.aql
index 1aebe8d..6d0e431 100644
--- a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_index_only/primary_index_only.3.update.aql
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_index_only/primary_index_only.3.update.aql
@@ -8,4 +8,4 @@
 use dataverse recovery;
 
 load dataset Fragile_raw using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
-(("path"="127.0.0.1://data/csv/fragile_01.csv"),("format"="delimited-text"),("delimiter"=",")) pre-sorted;
+(("path"="127.0.0.1://../../../../../../asterix-app/data/csv/fragile_01.csv"),("format"="delimited-text"),("delimiter"=",")) pre-sorted;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.1.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.1.script.aql
new file mode 100644
index 0000000..323b1cf
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.1.script.aql
@@ -0,0 +1 @@
+create_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.2.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.2.ddl.aql
new file mode 100644
index 0000000..50775b0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.2.ddl.aql
@@ -0,0 +1,53 @@
+/* 
+ * Test case Name  : primary_plus_default_secondary_index.aql
+ * Description     : Check that abort from duplicate key exception works and crash recovery works after the abort.   
+ * Expected Result : Success
+ * Date            : Oct 15 2013
+ */
+
+drop dataverse recovery if exists;
+create dataverse recovery;
+use dataverse recovery;
+
+/* For raw Fragile data */
+create type FragileTypeRaw as closed {
+  row_id: int32,
+  sid: int32,
+  date: string,
+  day: int32,
+  time: string,
+  bpm: int32,
+  RR: float,
+  /* new string field and location field*/
+  text: string,
+  location: point,
+  text2: string
+};
+
+/* For cleaned Fragile data */
+create type FragileType as closed {
+  row_id: int32,
+  sid: int32,
+  date: date,
+  day: int32,
+  time: time,
+  bpm: int32,
+  RR: float,
+  
+  /* new string field and location field*/
+  text: string,
+  location: point,
+  text2: string
+};
+
+/* Create dataset for loading raw Fragile data */
+create dataset Fragile_raw (FragileTypeRaw)
+primary key row_id;
+
+/* Create dataset for cleaned Fragile data */
+create dataset Fragile (FragileType)
+primary key row_id;
+
+/* Create default secondary index on dataset clean Fragile */
+create index cfSidIdx on Fragile(sid);
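+/* The sid-based count in steps 5 and 8 presumably probes this index before and
+   after the crash, so its recovery is what this test variant verifies. */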
+
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.3.update.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.3.update.aql
new file mode 100644
index 0000000..5d7d6f2
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.3.update.aql
@@ -0,0 +1,11 @@
+/* 
+ * Test case Name  : primary_plus_default_secondary_index.aql
+ * Description     : Check that abort from duplicate key exception works and crash recovery works after the abort.   
+ * Expected Result : Success
+ * Date            : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+load dataset Fragile_raw using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="127.0.0.1://../../../../../../asterix-app/data/csv/fragile_02.adm"),("format"="adm")) pre-sorted;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.4.txneu.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.4.txneu.aql
new file mode 100644
index 0000000..12fd699
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.4.txneu.aql
@@ -0,0 +1,25 @@
+/* 
+ * Test case Name  : primary_plus_default_secondary_index.aql
+ * Description     : Check that abort from duplicate key exception works and crash recovery works after the abort.   
+ * Expected Result : Success
+ * Date            : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+/* Load Fragile data from raw dataset into cleaned dataset */
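+/* Note: the modulo on row_id below presumably wraps keys so that, once the raw
+   data exceeds 28000 rows, the insert hits a duplicate primary key and aborts,
+   which is the failure this recovery test expects. */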
+insert into dataset Fragile (
+  for $t in dataset Fragile_raw
+  return {
+    "row_id": $t.row_id % 28000,
+    "sid": $t.sid,
+    "date": date($t.date),
+    "day": $t.day,
+    "time": parse-time($t.time, "h:m:s"),
+    "bpm": $t.bpm,
+    "RR": $t.RR,
+    "text": $t.text,
+    "location": $t.location,
+    "text2": $t.text2
+  }
+);
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.5.txnqbc.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.5.txnqbc.aql
new file mode 100644
index 0000000..4d41446
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.5.txnqbc.aql
@@ -0,0 +1,10 @@
+/* 
+ * Test case Name  : primary_plus_default_secondary_index.aql
+ * Description     : Check that abort from duplicate key exception works and crash recovery works after the abort.   
+ * Expected Result : Success
+ * Date            : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+count (for $x in dataset Fragile where $x.sid=1 return $x);
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.6.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.6.script.aql
new file mode 100644
index 0000000..31d37ae
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.6.script.aql
@@ -0,0 +1 @@
+kill_cc_and_nc.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.7.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.7.script.aql
new file mode 100644
index 0000000..37ef6c0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.7.script.aql
@@ -0,0 +1 @@
+stop_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.8.txnqar.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.8.txnqar.aql
new file mode 100644
index 0000000..dd2cde5
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.8.txnqar.aql
@@ -0,0 +1,10 @@
+/* 
+ * Test case Name  : primary_plus_default_secondary_index.aql
+ * Description     : Check that abort from duplicate key exception works and crash recovery works after the abort.   
+ * Expected Result : Success
+ * Date            : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+count (for $x in dataset Fragile where $x.sid=1 return $x);
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.9.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.9.script.aql
new file mode 100644
index 0000000..f75dfc9
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_default_secondary_index/primary_plus_default_secondary_index.9.script.aql
@@ -0,0 +1 @@
+stop_and_delete.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.1.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.1.script.aql
new file mode 100644
index 0000000..323b1cf
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.1.script.aql
@@ -0,0 +1 @@
+create_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.2.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.2.ddl.aql
new file mode 100644
index 0000000..11397d7
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.2.ddl.aql
@@ -0,0 +1,54 @@
+/* 
+ * Test case Name  : primary_plus_keyword_secondary_index.aql
+ * Description     : Check that abort from duplicate key exception works and crash recovery works after the abort.   
+ * Expected Result : Success
+ * Date            : Oct 15 2013
+ */
+
+drop dataverse recovery if exists;
+create dataverse recovery;
+use dataverse recovery;
+
+/* For raw Fragile data */
+create type FragileTypeRaw as closed {
+  row_id: int32,
+  sid: int32,
+  date: string,
+  day: int32,
+  time: string,
+  bpm: int32,
+  RR: float,
+  /* new string field and location field*/
+  text: string,
+  location: point,
+  text2: string
+  
+};
+
+/* For cleaned Fragile data */
+create type FragileType as closed {
+  row_id: int32,
+  sid: int32,
+  date: date,
+  day: int32,
+  time: time,
+  bpm: int32,
+  RR: float,
+  
+  /* new string field and location field*/
+  text: string,
+  location: point,
+  text2: string
+};
+
+/* Create dataset for loading raw Fragile data */
+create dataset Fragile_raw (FragileTypeRaw)
+primary key row_id;
+
+/* Create dataset for cleaned Fragile data */
+create dataset Fragile (FragileType)
+primary key row_id;
+
+/* Create keyword secondary index on dataset clean Fragile */
+create index cfText on Fragile(text) type keyword;
+
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.3.update.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.3.update.aql
new file mode 100644
index 0000000..9d9757f
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.3.update.aql
@@ -0,0 +1,12 @@
+/* 
+ * Test case Name  : primary_plus_keyword_secondary_index.aql
+ * Description     : Check that abort from duplicate key exception works and crash recovery works after the abort.   
+ * Expected Result : Success
+ * Date            : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+
+load dataset Fragile_raw using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="127.0.0.1://../../../../../../asterix-app/data/csv/fragile_02.adm"),("format"="adm")) pre-sorted;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.4.txneu.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.4.txneu.aql
new file mode 100644
index 0000000..3c38973
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.4.txneu.aql
@@ -0,0 +1,25 @@
+/* 
+ * Test case Name  : primary_plus_keyword_secondary_index.aql
+ * Description     : Check that abort from duplicate key exception works and crash recovery works after the abort.   
+ * Expected Result : Success
+ * Date            : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+/* Load Fragile data from raw dataset into cleaned dataset */
+insert into dataset Fragile (
+  for $t in dataset Fragile_raw
+  return {
+    "row_id": $t.row_id % 28000,
+    "sid": $t.sid,
+    "date": date($t.date),
+    "day": $t.day,
+    "time": parse-time($t.time, "h:m:s"),
+    "bpm": $t.bpm,
+    "RR": $t.RR,
+    "text": $t.text,
+    "location": $t.location,
+    "text2": $t.text2
+  }
+);
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.5.txnqbc.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.5.txnqbc.aql
new file mode 100644
index 0000000..e40e389
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.5.txnqbc.aql
@@ -0,0 +1,10 @@
+/* 
+ * Test case Name  : primary_plus_keyword_secondary_index.aql
+ * Description     : Check that abort from duplicate key exception works and crash recovery works after the abort.   
+ * Expected Result : Success
+ * Date            : Oct 15 2013
+ */
+
+use dataverse recovery;
+
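+/* edit-distance($x.text, $x.text) is always 0, so this predicate matches every
+   record; the comparison on the indexed field is presumably meant to draw the
+   secondary index into the plan, and the count should be identical before and
+   after recovery. */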
+count (for $x in dataset Fragile where edit-distance($x.text, $x.text)=0 return $x);
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.6.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.6.script.aql
new file mode 100644
index 0000000..31d37ae
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.6.script.aql
@@ -0,0 +1 @@
+kill_cc_and_nc.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.7.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.7.script.aql
new file mode 100644
index 0000000..37ef6c0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.7.script.aql
@@ -0,0 +1 @@
+stop_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.8.txnqar.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.8.txnqar.aql
new file mode 100644
index 0000000..b72cb5f
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.8.txnqar.aql
@@ -0,0 +1,10 @@
+/* 
+ * Test case Name  : primary_plus_keyword_secondary_index.aql
+ * Description     : Check that abort from duplicate key exception works and crash recovery works after the abort.   
+ * Expected Result : Success
+ * Date            : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+count (for $x in dataset Fragile where edit-distance($x.text, $x.text)=0 return $x);
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.9.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.9.script.aql
new file mode 100644
index 0000000..f75dfc9
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_keyword_secondary_index/primary_plus_keyword_secondary_index.9.script.aql
@@ -0,0 +1 @@
+stop_and_delete.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.1.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.1.script.aql
new file mode 100644
index 0000000..323b1cf
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.1.script.aql
@@ -0,0 +1 @@
+create_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.2.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.2.ddl.aql
new file mode 100644
index 0000000..8b83fc8
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.2.ddl.aql
@@ -0,0 +1,60 @@
+/* 
+ * Test case Name  : primary_plus_multiple_secondary_indices.aql
+ * Description     : Check that abort from duplicate key exception works and crash recovery works after the abort.   
+ * Expected Result : Success
+ * Date            : Oct 15 2013
+ */
+
+drop dataverse recovery if exists;
+create dataverse recovery;
+use dataverse recovery;
+
+/* For raw Fragile data */
+create type FragileTypeRaw as closed {
+  row_id: int32,
+  sid: int32,
+  date: string,
+  day: int32,
+  time: string,
+  bpm: int32,
+  RR: float,
+  /* new string field and location field*/
+  text: string,
+  location: point,
+  text2: string
+  
+};
+
+/* For cleaned Fragile data */
+create type FragileType as closed {
+  row_id: int32,
+  sid: int32,
+  date: date,
+  day: int32,
+  time: time,
+  bpm: int32,
+  RR: float,
+  
+  /* new string field and location field*/
+  text: string,
+  location: point,
+  text2: string
+};
+
+/* Create dataset for loading raw Fragile data */
+create dataset Fragile_raw (FragileTypeRaw)
+primary key row_id;
+
+/* Create dataset for cleaned Fragile data */
+create dataset Fragile (FragileType)
+primary key row_id;
+
+
+/* Create rtree secondary index on dataset clean Fragile */
+create index cfLocation on Fragile(location) type rtree;
+
+create index cfText on Fragile(text) type keyword;
+
+create index cfSidIdx on Fragile(sid);
+
+create index cfText2Ix on Fragile(text2) type ngram(3);
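+
+/* With btree (sid), rtree (location), keyword (text) and ngram (text2) indexes all
+   defined on Fragile, a single abort-and-recover cycle exercises recovery for every
+   secondary index type at once. */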
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.3.update.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.3.update.aql
new file mode 100644
index 0000000..9d9757f
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.3.update.aql
@@ -0,0 +1,12 @@
+/* 
+ * Test case Name  : primary_plus_multiple_secondary_indices.aql
+ * Description     : Check that abort from duplicate key exception works and crash recovery works after the abort.   
+ * Expected Result : Success
+ * Date            : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+
+load dataset Fragile_raw using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="127.0.0.1://../../../../../../asterix-app/data/csv/fragile_02.adm"),("format"="adm")) pre-sorted;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.4.txneu.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.4.txneu.aql
new file mode 100644
index 0000000..3c38973
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.4.txneu.aql
@@ -0,0 +1,25 @@
+/* 
+ * Test case Name  : primary_plus_multiple_secondary_indices.aql
+ * Description     : Check that abort from duplicate key exception works and crash recovery works after the abort.   
+ * Expected Result : Success
+ * Date            : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+/* Load Fragile data from raw dataset into cleaned dataset */
+insert into dataset Fragile (
+  for $t in dataset Fragile_raw
+  return {
+    "row_id": $t.row_id % 28000,
+    "sid": $t.sid,
+    "date": date($t.date),
+    "day": $t.day,
+    "time": parse-time($t.time, "h:m:s"),
+    "bpm": $t.bpm,
+    "RR": $t.RR,
+    "text": $t.text,
+    "location": $t.location,
+    "text2": $t.text2
+  }
+);
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.5.txnqbc.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.5.txnqbc.aql
new file mode 100644
index 0000000..28ea155
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.5.txnqbc.aql
@@ -0,0 +1,10 @@
+/* 
+ * Test case Name  : primary_plus_multiple_secondary_indices.aql
+ * Description     : Check that abort from duplicate key exception works and crash recovery works after the abort.   
+ * Expected Result : Success
+ * Date            : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+count (for $x in dataset Fragile where edit-distance($x.text2, $x.text2)=0 return $x);
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.6.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.6.script.aql
new file mode 100644
index 0000000..31d37ae
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.6.script.aql
@@ -0,0 +1 @@
+kill_cc_and_nc.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.7.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.7.script.aql
new file mode 100644
index 0000000..37ef6c0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.7.script.aql
@@ -0,0 +1 @@
+stop_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.8.txnqar.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.8.txnqar.aql
new file mode 100644
index 0000000..33551e6
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.8.txnqar.aql
@@ -0,0 +1,10 @@
+/* 
+ * Test case Name  : primary_plus_multiple_secondary_indices.aql
+ * Description     : Check that abort from duplicate key exception works and crash recovery works after the abort.   
+ * Expected Result : Success
+ * Date            : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+count (for $x in dataset Fragile where edit-distance($x.text2, $x.text2)=0 return $x);
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.9.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.9.script.aql
new file mode 100644
index 0000000..f75dfc9
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_multiple_secondary_indices/primary_plus_multiple_secondary_indices.9.script.aql
@@ -0,0 +1 @@
+stop_and_delete.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.1.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.1.script.aql
new file mode 100644
index 0000000..323b1cf
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.1.script.aql
@@ -0,0 +1 @@
+create_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.2.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.2.ddl.aql
new file mode 100644
index 0000000..78c7f43
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.2.ddl.aql
@@ -0,0 +1,53 @@
+/* 
+ * Test case Name  : primary_plus_ngram_index.aql
+ * Description     : Check that abort from duplicate key exception works and crash recovery works after the abort.   
+ * Expected Result : Success
+ * Date            : Oct 15 2013
+ */
+
+drop dataverse recovery if exists;
+create dataverse recovery;
+use dataverse recovery;
+
+/* For raw Fragile data */
+create type FragileTypeRaw as closed {
+  row_id: int32,
+  sid: int32,
+  date: string,
+  day: int32,
+  time: string,
+  bpm: int32,
+  RR: float,
+  /* new string field and location field*/
+  text: string,
+  location: point,
+  text2: string
+};
+
+/* For cleaned Fragile data */
+create type FragileType as closed {
+  row_id: int32,
+  sid: int32,
+  date: date,
+  day: int32,
+  time: time,
+  bpm: int32,
+  RR: float,
+  
+  /* new string field and location field*/
+  text: string,
+  location: point,
+  text2: string
+};
+
+/* Create dataset for loading raw Fragile data */
+create dataset Fragile_raw (FragileTypeRaw)
+primary key row_id;
+
+/* Create dataset for cleaned Fragile data */
+create dataset Fragile (FragileType)
+primary key row_id;
+
+/* Create ngram secondary index on dataset clean Fragile */
+create index cfText2Ix on Fragile(text2) type ngram(3);
+
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.3.update.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.3.update.aql
new file mode 100644
index 0000000..5d7d6f2
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.3.update.aql
@@ -0,0 +1,11 @@
+/* 
+ * Test case Name  : primary_plus_ngram_index.aql
+ * Description     : Check that abort from duplicate key exception works and crash recovery works after the abort.   
+ * Expected Result : Success
+ * Date            : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+load dataset Fragile_raw using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="127.0.0.1://../../../../../../asterix-app/data/csv/fragile_02.adm"),("format"="adm")) pre-sorted;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.4.txneu.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.4.txneu.aql
new file mode 100644
index 0000000..12fd699
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.4.txneu.aql
@@ -0,0 +1,25 @@
+/* 
+ * Test case Name  : primary_plus_ngram_index.aql
+ * Description     : Check that abort from duplicate key exception works and crash recovery works after the abort.   
+ * Expected Result : Success
+ * Date            : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+/* Load Fragile data from raw dataset into cleaned dataset */
+insert into dataset Fragile (
+  for $t in dataset Fragile_raw
+  return {
+    "row_id": $t.row_id % 28000,
+    "sid": $t.sid,
+    "date": date($t.date),
+    "day": $t.day,
+    "time": parse-time($t.time, "h:m:s"),
+    "bpm": $t.bpm,
+    "RR": $t.RR,
+    "text": $t.text,
+    "location": $t.location,
+    "text2": $t.text2
+  }
+);
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.5.txnqbc.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.5.txnqbc.aql
new file mode 100644
index 0000000..1383cc6
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.5.txnqbc.aql
@@ -0,0 +1,10 @@
+/* 
+ * Test case Name  : primary_plus_ngram_index.aql
+ * Description     : Check that abort from duplicate key exception works and crash recovery works after the abort.   
+ * Expected Result : Success
+ * Date            : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+count (for $x in dataset Fragile where edit-distance($x.text2, $x.text2)=0 return $x);
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.6.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.6.script.aql
new file mode 100644
index 0000000..31d37ae
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.6.script.aql
@@ -0,0 +1 @@
+kill_cc_and_nc.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.7.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.7.script.aql
new file mode 100644
index 0000000..37ef6c0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.7.script.aql
@@ -0,0 +1 @@
+stop_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.8.txnqar.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.8.txnqar.aql
new file mode 100644
index 0000000..0dbbf71
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.8.txnqar.aql
@@ -0,0 +1,10 @@
+/* 
+ * Test case Name  : primary_plus_ngram_index.aql
+ * Description     : Check that abort from duplicate key exception works and crash recovery works after the abort.   
+ * Expected Result : Success
+ * Date            : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+count (for $x in dataset Fragile where edit-distance($x.text2, $x.text2)=0 return $x);
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.9.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.9.script.aql
new file mode 100644
index 0000000..f75dfc9
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_ngram_index/primary_plus_ngram_index.9.script.aql
@@ -0,0 +1 @@
+stop_and_delete.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.1.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.1.script.aql
new file mode 100644
index 0000000..323b1cf
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.1.script.aql
@@ -0,0 +1 @@
+create_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.2.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.2.ddl.aql
new file mode 100644
index 0000000..4faa6e3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.2.ddl.aql
@@ -0,0 +1,54 @@
+/* 
+ * Test case Name  : primary_plus_rtree_index.aql
+ * Description     : Check that abort from duplicate key exception works and crash recovery works after the abort.   
+ * Expected Result : Success
+ * Date            : Oct 15 2013
+ */
+
+drop dataverse recovery if exists;
+create dataverse recovery;
+use dataverse recovery;
+
+/* For raw Fragile data */
+create type FragileTypeRaw as closed {
+  row_id: int32,
+  sid: int32,
+  date: string,
+  day: int32,
+  time: string,
+  bpm: int32,
+  RR: float,
+  /* new string field and location field*/
+  text: string,
+  location: point,
+  text2: string
+  
+};
+
+/* For cleaned Fragile data */
+create type FragileType as closed {
+  row_id: int32,
+  sid: int32,
+  date: date,
+  day: int32,
+  time: time,
+  bpm: int32,
+  RR: float,
+  
+  /* new string field and location field*/
+  text: string,
+  location: point,
+  text2: string
+};
+
+/* Create dataset for loading raw Fragile data */
+create dataset Fragile_raw (FragileTypeRaw)
+primary key row_id;
+
+/* Create dataset for cleaned Fragile data */
+create dataset Fragile (FragileType)
+primary key row_id;
+
+
+/* Create rtree secondary index on dataset clean Fragile */
+create index cfLocation on Fragile(location) type rtree;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.3.update.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.3.update.aql
new file mode 100644
index 0000000..9d9757f
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.3.update.aql
@@ -0,0 +1,12 @@
+/* 
+ * Test case Name  : primary_plus_rtree_index.aql
+ * Description     : Check that abort from duplicate key exception works and crash recovery works after the abort.   
+ * Expected Result : Success
+ * Date            : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+
+load dataset Fragile_raw using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="127.0.0.1://../../../../../../asterix-app/data/csv/fragile_02.adm"),("format"="adm")) pre-sorted;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.4.txneu.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.4.txneu.aql
new file mode 100644
index 0000000..3c38973
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.4.txneu.aql
@@ -0,0 +1,25 @@
+/* 
+ * Test case Name  : primary_plus_rtree_index.aql
+ * Description     : Check that abort from duplicate key exception works and crash recovery works after the abort.   
+ * Expected Result : Success
+ * Date            : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+/* Load Fragile data from raw dataset into cleaned dataset */
+insert into dataset Fragile (
+  for $t in dataset Fragile_raw
+  return {
+    "row_id": $t.row_id % 28000,
+    "sid": $t.sid,
+    "date": date($t.date),
+    "day": $t.day,
+    "time": parse-time($t.time, "h:m:s"),
+    "bpm": $t.bpm,
+    "RR": $t.RR,
+    "text": $t.text,
+    "location": $t.location,
+    "text2": $t.text2
+  }
+);
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.5.txnqbc.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.5.txnqbc.aql
new file mode 100644
index 0000000..1d8bc54
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.5.txnqbc.aql
@@ -0,0 +1,10 @@
+/* 
+ * Test case Name  : primary_plus_rtree_index.aql
+ * Description     : Check that abort from duplicate key exception works and crash recovery works after the abort.   
+ * Expected Result : Success
+ * Date            : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+count (for $x in dataset Fragile where $x.location=$x.location return $x);
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.6.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.6.script.aql
new file mode 100644
index 0000000..31d37ae
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.6.script.aql
@@ -0,0 +1 @@
+kill_cc_and_nc.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.7.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.7.script.aql
new file mode 100644
index 0000000..37ef6c0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.7.script.aql
@@ -0,0 +1 @@
+stop_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.8.txnqar.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.8.txnqar.aql
new file mode 100644
index 0000000..4a94174
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.8.txnqar.aql
@@ -0,0 +1,10 @@
+/* 
+ * Test case Name  : primary_plus_rtree_index.aql
+ * Description     : Check that abort from duplicate key exception works and crash recovery works after the abort.   
+ * Expected Result : Success
+ * Date            : Oct 15 2013
+ */
+
+use dataverse recovery;
+
+count (for $x in dataset Fragile where $x.location=$x.location return $x);
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.9.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.9.script.aql
new file mode 100644
index 0000000..f75dfc9
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recover_after_abort/primary_plus_rtree_index/primary_plus_rtree_index.9.script.aql
@@ -0,0 +1 @@
+stop_and_delete.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.1.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.1.script.aql
new file mode 100644
index 0000000..323b1cf
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.1.script.aql
@@ -0,0 +1 @@
+create_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.2.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.2.ddl.aql
new file mode 100644
index 0000000..d32bbb0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.2.ddl.aql
@@ -0,0 +1,19 @@
+/* 
+ * Test case Name  : dataset_recovery
+ * Description     :    
+ * Expected Result : 
+ * Date            : 
+ */
+
+drop dataverse SampleDV if exists;
+create dataverse SampleDV;
+
+use dataverse SampleDV;
+
+create type SampleType as open {
+	id: int32,
+	text: string
+};
+
+create dataset SampleDS(SampleType)
+primary key id;
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.3.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.3.script.aql
new file mode 100644
index 0000000..31d37ae
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.3.script.aql
@@ -0,0 +1 @@
+kill_cc_and_nc.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.4.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.4.script.aql
new file mode 100644
index 0000000..3ba1dc0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.4.script.aql
@@ -0,0 +1 @@
+stop_and_start.sh
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.5.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.5.ddl.aql
new file mode 100644
index 0000000..da254c2
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.5.ddl.aql
@@ -0,0 +1,2 @@
+use dataverse SampleDV;
+drop dataset SampleDS;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.6.errddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.6.errddl.aql
new file mode 100644
index 0000000..da254c2
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.6.errddl.aql
@@ -0,0 +1,2 @@
+use dataverse SampleDV;
+drop dataset SampleDS;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.7.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.7.script.aql
new file mode 100644
index 0000000..10e1a51
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataset_recovery/dataset_recovery.7.script.aql
@@ -0,0 +1 @@
+stop_and_delete.sh
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.1.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.1.script.aql
new file mode 100644
index 0000000..323b1cf
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.1.script.aql
@@ -0,0 +1 @@
+create_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.2.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.2.ddl.aql
new file mode 100644
index 0000000..762c652
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.2.ddl.aql
@@ -0,0 +1,16 @@
+/* 
+ * Test case Name  : datatype_recovery
+ * Description     :    
+ * Expected Result : 
+ * Date            : 
+ */
+
+drop dataverse SampleDV if exists;
+create dataverse SampleDV;
+
+use dataverse SampleDV;
+
+create type SampleType as open {
+	id: int32,
+	text: string
+};
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.3.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.3.script.aql
new file mode 100644
index 0000000..31d37ae
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.3.script.aql
@@ -0,0 +1 @@
+kill_cc_and_nc.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.4.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.4.script.aql
new file mode 100644
index 0000000..3ba1dc0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.4.script.aql
@@ -0,0 +1 @@
+stop_and_start.sh
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.5.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.5.ddl.aql
new file mode 100644
index 0000000..9b7dce7
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.5.ddl.aql
@@ -0,0 +1,2 @@
+use dataverse SampleDV;
+drop type SampleType;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.6.errddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.6.errddl.aql
new file mode 100644
index 0000000..9b7dce7
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.6.errddl.aql
@@ -0,0 +1,2 @@
+use dataverse SampleDV;
+drop type SampleType;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.7.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.7.script.aql
new file mode 100644
index 0000000..10e1a51
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/datatype_recovery/datatype_recovery.7.script.aql
@@ -0,0 +1 @@
+stop_and_delete.sh
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.1.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.1.script.aql
new file mode 100644
index 0000000..323b1cf
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.1.script.aql
@@ -0,0 +1 @@
+create_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.2.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.2.ddl.aql
new file mode 100644
index 0000000..93139d6
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.2.ddl.aql
@@ -0,0 +1,9 @@
+/* 
+ * Test case Name  : dataverse_recovery
+ * Description     :    
+ * Expected Result : 
+ * Date            : 
+ */
+
+drop dataverse SampleDV if exists;
+create dataverse SampleDV;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.3.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.3.script.aql
new file mode 100644
index 0000000..31d37ae
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.3.script.aql
@@ -0,0 +1 @@
+kill_cc_and_nc.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.4.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.4.script.aql
new file mode 100644
index 0000000..3ba1dc0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.4.script.aql
@@ -0,0 +1 @@
+stop_and_start.sh
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.5.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.5.ddl.aql
new file mode 100644
index 0000000..9b12205
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.5.ddl.aql
@@ -0,0 +1 @@
+drop dataverse SampleDV;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.6.errddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.6.errddl.aql
new file mode 100644
index 0000000..9b12205
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.6.errddl.aql
@@ -0,0 +1 @@
+drop dataverse SampleDV;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.7.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.7.script.aql
new file mode 100644
index 0000000..10e1a51
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/dataverse_recovery/dataverse_recovery.7.script.aql
@@ -0,0 +1 @@
+stop_and_delete.sh
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.1.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.1.script.aql
new file mode 100644
index 0000000..323b1cf
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.1.script.aql
@@ -0,0 +1 @@
+create_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.2.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.2.ddl.aql
new file mode 100644
index 0000000..3a8a9d2
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.2.ddl.aql
@@ -0,0 +1,41 @@
+/* 
+ * Test case Name  : delete_after_recovery.aql
+ * Description     : Check that deletes work correctly on a dataset after crash recovery.
+ * Expected Result : Success
+ * Date            : September 25 2013
+ */
+
+drop dataverse recovery if exists;
+create dataverse recovery;
+use dataverse recovery;
+
+/* For raw Fragile data */
+create type FragileTypeRaw as closed {
+  row_id: int32,
+  sid: int32,
+  date: string,
+  day: int32,
+  time: string,
+  bpm: int32,
+  RR: float
+};
+
+/* For cleaned Fragile data */
+create type FragileType as closed {
+  row_id: int32,
+  sid: int32,
+  date: date,
+  day: int32,
+  time: time,
+  bpm: int32,
+  RR: float
+};
+
+/* Create dataset for loading raw Fragile data */
+create dataset Fragile_raw (FragileTypeRaw)
+primary key row_id;
+
+/* Create dataset for cleaned Fragile data */
+create dataset Fragile (FragileType)
+primary key row_id;
+
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.3.update.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.3.update.aql
new file mode 100644
index 0000000..6d0e431
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.3.update.aql
@@ -0,0 +1,11 @@
+/*
+ * Test case Name  : delete_after_recovery.aql
+ * Description     : Check that delete operations work correctly after crash recovery.
+ * Expected Result : Success
+ * Date            : September 25 2013
+ */
+
+use dataverse recovery;
+
+load dataset Fragile_raw using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="127.0.0.1://../../../../../../asterix-app/data/csv/fragile_01.csv"),("format"="delimited-text"),("delimiter"=",")) pre-sorted;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.4.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.4.script.aql
new file mode 100644
index 0000000..31d37ae
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.4.script.aql
@@ -0,0 +1 @@
+kill_cc_and_nc.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.5.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.5.script.aql
new file mode 100644
index 0000000..3ba1dc0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.5.script.aql
@@ -0,0 +1 @@
+stop_and_start.sh
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.6.update.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.6.update.aql
new file mode 100644
index 0000000..a5e94d2
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.6.update.aql
@@ -0,0 +1,5 @@
+use dataverse recovery;
+
+delete $r from dataset Fragile_raw 
+  where $r.row_id % 2 = 1
+;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.7.query.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.7.query.aql
new file mode 100644
index 0000000..a63aee2
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.7.query.aql
@@ -0,0 +1,11 @@
+/*
+ * Test case Name  : delete_after_recovery.aql
+ * Description     : Check that delete operations work correctly after crash recovery.
+ * Expected Result : Success
+ * Date            : September 25 2013
+ */
+
+use dataverse recovery;
+
+count (for $x in dataset Fragile_raw return $x);
+
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.8.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.8.script.aql
new file mode 100644
index 0000000..f75dfc9
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/delete_after_recovery/delete_after_recovery.8.script.aql
@@ -0,0 +1 @@
+stop_and_delete.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.1.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.1.script.aql
new file mode 100644
index 0000000..323b1cf
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.1.script.aql
@@ -0,0 +1 @@
+create_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.2.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.2.ddl.aql
new file mode 100644
index 0000000..736f6cd
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.2.ddl.aql
@@ -0,0 +1,7 @@
+create dataverse recovery;
+
+use dataverse recovery;
+
+create function add($a, $b) {
+  $a + $b
+};
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.3.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.3.script.aql
new file mode 100644
index 0000000..31d37ae
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.3.script.aql
@@ -0,0 +1 @@
+kill_cc_and_nc.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.4.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.4.script.aql
new file mode 100644
index 0000000..37ef6c0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.4.script.aql
@@ -0,0 +1 @@
+stop_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.5.query.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.5.query.aql
new file mode 100644
index 0000000..9bafad3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.5.query.aql
@@ -0,0 +1,12 @@
+/*
+ * Test case Name  : function_recovery.aql
+ * Description     : Check that a user-defined function is still usable after crash recovery.
+ * Expected Result : Success
+ * Date            : September 25 2013
+ */
+
+use dataverse recovery;
+let $a := 1
+let $b := 2
+return add($a, $b);
+
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.6.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.6.ddl.aql
new file mode 100644
index 0000000..fddc3fc
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.6.ddl.aql
@@ -0,0 +1,3 @@
+use dataverse recovery;
+
+drop function add @ 2;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.7.errddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.7.errddl.aql
new file mode 100644
index 0000000..fddc3fc
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.7.errddl.aql
@@ -0,0 +1,3 @@
+use dataverse recovery;
+
+drop function add @ 2;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.8.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.8.script.aql
new file mode 100644
index 0000000..f75dfc9
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/function_recovery/function_recovery.8.script.aql
@@ -0,0 +1 @@
+stop_and_delete.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.1.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.1.script.aql
new file mode 100644
index 0000000..323b1cf
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.1.script.aql
@@ -0,0 +1 @@
+create_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.2.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.2.ddl.aql
new file mode 100644
index 0000000..3a8a9d2
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.2.ddl.aql
@@ -0,0 +1,41 @@
+/*
+ * Test case Name  : insert_after_recovery.aql
+ * Description     : Check that insert operations work correctly after crash recovery.
+ * Expected Result : Success
+ * Date            : September 25 2013
+ */
+
+drop dataverse recovery if exists;
+create dataverse recovery;
+use dataverse recovery;
+
+/* For raw Fragile data */
+create type FragileTypeRaw as closed {
+  row_id: int32,
+  sid: int32,
+  date: string,
+  day: int32,
+  time: string,
+  bpm: int32,
+  RR: float
+};
+
+/* For cleaned Fragile data */
+create type FragileType as closed {
+  row_id: int32,
+  sid: int32,
+  date: date,
+  day: int32,
+  time: time,
+  bpm: int32,
+  RR: float
+};
+
+/* Create dataset for loading raw Fragile data */
+create dataset Fragile_raw (FragileTypeRaw)
+primary key row_id;
+
+/* Create dataset for cleaned Fragile data */
+create dataset Fragile (FragileType)
+primary key row_id;
+
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.3.update.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.3.update.aql
new file mode 100644
index 0000000..6d0e431
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.3.update.aql
@@ -0,0 +1,11 @@
+/*
+ * Test case Name  : insert_after_recovery.aql
+ * Description     : Check that insert operations work correctly after crash recovery.
+ * Expected Result : Success
+ * Date            : September 25 2013
+ */
+
+use dataverse recovery;
+
+load dataset Fragile_raw using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="127.0.0.1://../../../../../../asterix-app/data/csv/fragile_01.csv"),("format"="delimited-text"),("delimiter"=",")) pre-sorted;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.4.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.4.script.aql
new file mode 100644
index 0000000..31d37ae
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.4.script.aql
@@ -0,0 +1 @@
+kill_cc_and_nc.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.5.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.5.script.aql
new file mode 100644
index 0000000..3ba1dc0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.5.script.aql
@@ -0,0 +1 @@
+stop_and_start.sh
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.6.update.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.6.update.aql
new file mode 100644
index 0000000..26d998d
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.6.update.aql
@@ -0,0 +1,15 @@
+use dataverse recovery;
+
+/* Load Fragile data from raw dataset into cleaned dataset */
+insert into dataset Fragile (
+  for $t in dataset Fragile_raw
+  return {
+    "row_id": $t.row_id,
+    "sid": $t.sid,
+    "date": date($t.date),
+    "day": $t.day,
+    "time": parse-time($t.time, "h:m:s"),
+    "bpm": $t.bpm,
+    "RR": $t.RR
+  }
+);
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.7.query.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.7.query.aql
new file mode 100644
index 0000000..f8ed3d0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.7.query.aql
@@ -0,0 +1,11 @@
+/*
+ * Test case Name  : insert_after_recovery.aql
+ * Description     : Check that insert operations work correctly after crash recovery.
+ * Expected Result : Success
+ * Date            : September 25 2013
+ */
+
+use dataverse recovery;
+
+count (for $x in dataset Fragile return $x);
+
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.8.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.8.script.aql
new file mode 100644
index 0000000..f75dfc9
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/insert_after_recovery/insert_after_recovery.8.script.aql
@@ -0,0 +1 @@
+stop_and_delete.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.1.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.1.script.aql
new file mode 100644
index 0000000..323b1cf
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.1.script.aql
@@ -0,0 +1 @@
+create_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.2.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.2.ddl.aql
new file mode 100644
index 0000000..3a8a9d2
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.2.ddl.aql
@@ -0,0 +1,41 @@
+/*
+ * Test case Name  : load_after_recovery.aql
+ * Description     : Check that bulk load works correctly after crash recovery.
+ * Expected Result : Success
+ * Date            : September 25 2013
+ */
+
+drop dataverse recovery if exists;
+create dataverse recovery;
+use dataverse recovery;
+
+/* For raw Fragile data */
+create type FragileTypeRaw as closed {
+  row_id: int32,
+  sid: int32,
+  date: string,
+  day: int32,
+  time: string,
+  bpm: int32,
+  RR: float
+};
+
+/* For cleaned Fragile data */
+create type FragileType as closed {
+  row_id: int32,
+  sid: int32,
+  date: date,
+  day: int32,
+  time: time,
+  bpm: int32,
+  RR: float
+};
+
+/* Create dataset for loading raw Fragile data */
+create dataset Fragile_raw (FragileTypeRaw)
+primary key row_id;
+
+/* Create dataset for cleaned Fragile data */
+create dataset Fragile (FragileType)
+primary key row_id;
+
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.3.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.3.script.aql
new file mode 100644
index 0000000..31d37ae
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.3.script.aql
@@ -0,0 +1 @@
+kill_cc_and_nc.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.4.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.4.script.aql
new file mode 100644
index 0000000..37ef6c0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.4.script.aql
@@ -0,0 +1 @@
+stop_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.5.update.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.5.update.aql
new file mode 100644
index 0000000..6d0e431
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.5.update.aql
@@ -0,0 +1,11 @@
+/*
+ * Test case Name  : load_after_recovery.aql
+ * Description     : Check that bulk load works correctly after crash recovery.
+ * Expected Result : Success
+ * Date            : September 25 2013
+ */
+
+use dataverse recovery;
+
+load dataset Fragile_raw using "edu.uci.ics.asterix.external.dataset.adapter.NCFileSystemAdapter"
+(("path"="127.0.0.1://../../../../../../asterix-app/data/csv/fragile_01.csv"),("format"="delimited-text"),("delimiter"=",")) pre-sorted;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.6.query.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.6.query.aql
new file mode 100644
index 0000000..fcd9e05
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.6.query.aql
@@ -0,0 +1,10 @@
+/*
+ * Test case Name  : load_after_recovery.aql
+ * Description     : Check that bulk load works correctly after crash recovery.
+ * Expected Result : Success
+ * Date            : September 25 2013
+ */
+
+use dataverse recovery;
+
+count (for $x in dataset Fragile_raw return $x);
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.7.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.7.script.aql
new file mode 100644
index 0000000..f75dfc9
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/load_after_recovery/load_after_recovery.7.script.aql
@@ -0,0 +1 @@
+stop_and_delete.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.1.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.1.script.aql
new file mode 100644
index 0000000..323b1cf
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.1.script.aql
@@ -0,0 +1 @@
+create_and_start.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.2.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.2.ddl.aql
new file mode 100644
index 0000000..77a573b
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.2.ddl.aql
@@ -0,0 +1,21 @@
+/*
+ * Test case Name  : secondary_index_recovery
+ * Description     : Check that secondary index DDL survives crash recovery.
+ * Expected Result : Success
+ * Date            :
+ */
+
+drop dataverse SampleDV if exists;
+create dataverse SampleDV;
+
+use dataverse SampleDV;
+
+create type SampleType as open {
+  id: int32,
+  text: string
+};
+
+create dataset SampleDS(SampleType)
+primary key id;
+
+create index SampleDSix on SampleDS(text);
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.3.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.3.script.aql
new file mode 100644
index 0000000..31d37ae
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.3.script.aql
@@ -0,0 +1 @@
+kill_cc_and_nc.sh
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.4.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.4.script.aql
new file mode 100644
index 0000000..3ba1dc0
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.4.script.aql
@@ -0,0 +1 @@
+stop_and_start.sh
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.5.ddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.5.ddl.aql
new file mode 100644
index 0000000..d765b16
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.5.ddl.aql
@@ -0,0 +1,2 @@
+use dataverse SampleDV;
+drop index SampleDS.SampleDSix;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.6.errddl.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.6.errddl.aql
new file mode 100644
index 0000000..d765b16
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.6.errddl.aql
@@ -0,0 +1,2 @@
+use dataverse SampleDV;
+drop index SampleDS.SampleDSix;
diff --git a/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.7.script.aql b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.7.script.aql
new file mode 100644
index 0000000..10e1a51
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/queries/recovery_ddl/secondary_index_recovery/secondary_index_recovery.7.script.aql
@@ -0,0 +1 @@
+stop_and_delete.sh
diff --git a/asterix-installer/src/test/resources/transactionts/results/recovery_ddl/delete_after_recovery/delete_after_recovery.1.adm b/asterix-installer/src/test/resources/transactionts/results/recovery_ddl/delete_after_recovery/delete_after_recovery.1.adm
new file mode 100644
index 0000000..88859f8
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/results/recovery_ddl/delete_after_recovery/delete_after_recovery.1.adm
@@ -0,0 +1 @@
+129088i64
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/results/recovery_ddl/function_recovery/function_recovery.1.adm b/asterix-installer/src/test/resources/transactionts/results/recovery_ddl/function_recovery/function_recovery.1.adm
new file mode 100644
index 0000000..e440e5c
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/results/recovery_ddl/function_recovery/function_recovery.1.adm
@@ -0,0 +1 @@
+3
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/results/recovery_ddl/insert_after_recovery/insert_after_recovery.1.adm b/asterix-installer/src/test/resources/transactionts/results/recovery_ddl/insert_after_recovery/insert_after_recovery.1.adm
new file mode 100644
index 0000000..cefb395
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/results/recovery_ddl/insert_after_recovery/insert_after_recovery.1.adm
@@ -0,0 +1 @@
+258176i64
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/results/recovery_ddl/load_after_recovery/load_after_recovery.1.adm b/asterix-installer/src/test/resources/transactionts/results/recovery_ddl/load_after_recovery/load_after_recovery.1.adm
new file mode 100644
index 0000000..cefb395
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/results/recovery_ddl/load_after_recovery/load_after_recovery.1.adm
@@ -0,0 +1 @@
+258176i64
\ No newline at end of file
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_default_secondary_index/create_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_default_secondary_index/create_and_start.sh
new file mode 100755
index 0000000..945f01d
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_default_secondary_index/create_and_start.sh
@@ -0,0 +1 @@
+$MANAGIX_HOME/bin/managix create -n nc1 -c $MANAGIX_HOME/clusters/local/local.xml;
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_default_secondary_index/kill_cc_and_nc.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_default_secondary_index/kill_cc_and_nc.sh
new file mode 100755
index 0000000..096d7df
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_default_secondary_index/kill_cc_and_nc.sh
@@ -0,0 +1 @@
+jps | awk '{if ($2 == "NCDriver" || $2 == "CCDriver") print $1;}' | xargs -n 1 kill -9
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_default_secondary_index/stop_and_delete.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_default_secondary_index/stop_and_delete.sh
new file mode 100755
index 0000000..d7deea3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_default_secondary_index/stop_and_delete.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix delete -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_default_secondary_index/stop_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_default_secondary_index/stop_and_start.sh
new file mode 100755
index 0000000..7855938
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_default_secondary_index/stop_and_start.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix start -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_keyword_secondary_index/create_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_keyword_secondary_index/create_and_start.sh
new file mode 100755
index 0000000..945f01d
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_keyword_secondary_index/create_and_start.sh
@@ -0,0 +1 @@
+$MANAGIX_HOME/bin/managix create -n nc1 -c $MANAGIX_HOME/clusters/local/local.xml;
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_keyword_secondary_index/kill_cc_and_nc.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_keyword_secondary_index/kill_cc_and_nc.sh
new file mode 100755
index 0000000..096d7df
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_keyword_secondary_index/kill_cc_and_nc.sh
@@ -0,0 +1 @@
+jps | awk '{if ($2 == "NCDriver" || $2 == "CCDriver") print $1;}' | xargs -n 1 kill -9
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_keyword_secondary_index/stop_and_delete.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_keyword_secondary_index/stop_and_delete.sh
new file mode 100755
index 0000000..d7deea3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_keyword_secondary_index/stop_and_delete.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix delete -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_keyword_secondary_index/stop_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_keyword_secondary_index/stop_and_start.sh
new file mode 100755
index 0000000..7855938
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_keyword_secondary_index/stop_and_start.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix start -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_multiple_secondary_indices/create_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_multiple_secondary_indices/create_and_start.sh
new file mode 100755
index 0000000..945f01d
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_multiple_secondary_indices/create_and_start.sh
@@ -0,0 +1 @@
+$MANAGIX_HOME/bin/managix create -n nc1 -c $MANAGIX_HOME/clusters/local/local.xml;
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_multiple_secondary_indices/kill_cc_and_nc.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_multiple_secondary_indices/kill_cc_and_nc.sh
new file mode 100755
index 0000000..096d7df
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_multiple_secondary_indices/kill_cc_and_nc.sh
@@ -0,0 +1 @@
+jps | awk '{if ($2 == "NCDriver" || $2 == "CCDriver") print $1;}' | xargs -n 1 kill -9
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_multiple_secondary_indices/stop_and_delete.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_multiple_secondary_indices/stop_and_delete.sh
new file mode 100755
index 0000000..d7deea3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_multiple_secondary_indices/stop_and_delete.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix delete -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_multiple_secondary_indices/stop_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_multiple_secondary_indices/stop_and_start.sh
new file mode 100755
index 0000000..7855938
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_multiple_secondary_indices/stop_and_start.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix start -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_ngram_index/create_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_ngram_index/create_and_start.sh
new file mode 100755
index 0000000..945f01d
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_ngram_index/create_and_start.sh
@@ -0,0 +1 @@
+$MANAGIX_HOME/bin/managix create -n nc1 -c $MANAGIX_HOME/clusters/local/local.xml;
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_ngram_index/kill_cc_and_nc.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_ngram_index/kill_cc_and_nc.sh
new file mode 100755
index 0000000..096d7df
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_ngram_index/kill_cc_and_nc.sh
@@ -0,0 +1 @@
+jps | awk '{if ($2 == "NCDriver" || $2 == "CCDriver") print $1;}' | xargs -n 1 kill -9
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_ngram_index/stop_and_delete.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_ngram_index/stop_and_delete.sh
new file mode 100755
index 0000000..d7deea3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_ngram_index/stop_and_delete.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix delete -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_ngram_index/stop_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_ngram_index/stop_and_start.sh
new file mode 100755
index 0000000..7855938
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_ngram_index/stop_and_start.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix start -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_rtree_index/create_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_rtree_index/create_and_start.sh
new file mode 100755
index 0000000..945f01d
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_rtree_index/create_and_start.sh
@@ -0,0 +1 @@
+$MANAGIX_HOME/bin/managix create -n nc1 -c $MANAGIX_HOME/clusters/local/local.xml;
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_rtree_index/kill_cc_and_nc.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_rtree_index/kill_cc_and_nc.sh
new file mode 100755
index 0000000..096d7df
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_rtree_index/kill_cc_and_nc.sh
@@ -0,0 +1 @@
+jps | awk '{if ($2 == "NCDriver" || $2 == "CCDriver") print $1;}' | xargs -n 1 kill -9
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_rtree_index/stop_and_delete.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_rtree_index/stop_and_delete.sh
new file mode 100755
index 0000000..d7deea3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_rtree_index/stop_and_delete.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix delete -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_rtree_index/stop_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_rtree_index/stop_and_start.sh
new file mode 100755
index 0000000..7855938
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recover_after_abort/primary_plus_rtree_index/stop_and_start.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix start -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataset_recovery/create_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataset_recovery/create_and_start.sh
new file mode 100755
index 0000000..945f01d
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataset_recovery/create_and_start.sh
@@ -0,0 +1 @@
+$MANAGIX_HOME/bin/managix create -n nc1 -c $MANAGIX_HOME/clusters/local/local.xml;
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataset_recovery/kill_cc_and_nc.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataset_recovery/kill_cc_and_nc.sh
new file mode 100755
index 0000000..096d7df
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataset_recovery/kill_cc_and_nc.sh
@@ -0,0 +1 @@
+jps | awk '{if ($2 == "NCDriver" || $2 == "CCDriver") print $1;}' | xargs -n 1 kill -9
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataset_recovery/stop_and_delete.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataset_recovery/stop_and_delete.sh
new file mode 100755
index 0000000..d7deea3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataset_recovery/stop_and_delete.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix delete -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataset_recovery/stop_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataset_recovery/stop_and_start.sh
new file mode 100755
index 0000000..7855938
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataset_recovery/stop_and_start.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix start -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/datatype_recovery/create_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/datatype_recovery/create_and_start.sh
new file mode 100755
index 0000000..945f01d
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/datatype_recovery/create_and_start.sh
@@ -0,0 +1 @@
+$MANAGIX_HOME/bin/managix create -n nc1 -c $MANAGIX_HOME/clusters/local/local.xml;
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/datatype_recovery/kill_cc_and_nc.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/datatype_recovery/kill_cc_and_nc.sh
new file mode 100755
index 0000000..096d7df
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/datatype_recovery/kill_cc_and_nc.sh
@@ -0,0 +1 @@
+jps | awk '{if ($2 == "NCDriver" || $2 == "CCDriver") print $1;}' | xargs -n 1 kill -9
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/datatype_recovery/stop_and_delete.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/datatype_recovery/stop_and_delete.sh
new file mode 100755
index 0000000..d7deea3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/datatype_recovery/stop_and_delete.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix delete -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/datatype_recovery/stop_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/datatype_recovery/stop_and_start.sh
new file mode 100755
index 0000000..7855938
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/datatype_recovery/stop_and_start.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix start -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataverse_recovery/create_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataverse_recovery/create_and_start.sh
new file mode 100755
index 0000000..945f01d
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataverse_recovery/create_and_start.sh
@@ -0,0 +1 @@
+$MANAGIX_HOME/bin/managix create -n nc1 -c $MANAGIX_HOME/clusters/local/local.xml;
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataverse_recovery/kill_cc_and_nc.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataverse_recovery/kill_cc_and_nc.sh
new file mode 100755
index 0000000..096d7df
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataverse_recovery/kill_cc_and_nc.sh
@@ -0,0 +1 @@
+jps | awk '{if ($2 == "NCDriver" || $2 == "CCDriver") print $1;}' | xargs -n 1 kill -9
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataverse_recovery/stop_and_delete.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataverse_recovery/stop_and_delete.sh
new file mode 100755
index 0000000..d7deea3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataverse_recovery/stop_and_delete.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix delete -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataverse_recovery/stop_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataverse_recovery/stop_and_start.sh
new file mode 100755
index 0000000..7855938
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/dataverse_recovery/stop_and_start.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix start -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/delete_after_recovery/create_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/delete_after_recovery/create_and_start.sh
new file mode 100755
index 0000000..945f01d
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/delete_after_recovery/create_and_start.sh
@@ -0,0 +1 @@
+$MANAGIX_HOME/bin/managix create -n nc1 -c $MANAGIX_HOME/clusters/local/local.xml;
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/delete_after_recovery/kill_cc_and_nc.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/delete_after_recovery/kill_cc_and_nc.sh
new file mode 100755
index 0000000..096d7df
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/delete_after_recovery/kill_cc_and_nc.sh
@@ -0,0 +1 @@
+jps | awk '{if ($2 == "NCDriver" || $2 == "CCDriver") print $1;}' | xargs -n 1 kill -9
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/delete_after_recovery/stop_and_delete.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/delete_after_recovery/stop_and_delete.sh
new file mode 100755
index 0000000..d7deea3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/delete_after_recovery/stop_and_delete.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix delete -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/delete_after_recovery/stop_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/delete_after_recovery/stop_and_start.sh
new file mode 100755
index 0000000..7855938
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/delete_after_recovery/stop_and_start.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix start -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/function_recovery/create_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/function_recovery/create_and_start.sh
new file mode 100755
index 0000000..945f01d
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/function_recovery/create_and_start.sh
@@ -0,0 +1 @@
+$MANAGIX_HOME/bin/managix create -n nc1 -c $MANAGIX_HOME/clusters/local/local.xml;
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/function_recovery/kill_cc_and_nc.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/function_recovery/kill_cc_and_nc.sh
new file mode 100755
index 0000000..096d7df
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/function_recovery/kill_cc_and_nc.sh
@@ -0,0 +1 @@
+jps | awk '{if ($2 == "NCDriver" || $2 == "CCDriver") print $1;}' | xargs -n 1 kill -9
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/function_recovery/stop_and_delete.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/function_recovery/stop_and_delete.sh
new file mode 100755
index 0000000..d7deea3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/function_recovery/stop_and_delete.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix delete -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/function_recovery/stop_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/function_recovery/stop_and_start.sh
new file mode 100755
index 0000000..7855938
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/function_recovery/stop_and_start.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix start -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/insert_after_recovery/create_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/insert_after_recovery/create_and_start.sh
new file mode 100755
index 0000000..945f01d
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/insert_after_recovery/create_and_start.sh
@@ -0,0 +1 @@
+$MANAGIX_HOME/bin/managix create -n nc1 -c $MANAGIX_HOME/clusters/local/local.xml;
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/insert_after_recovery/kill_cc_and_nc.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/insert_after_recovery/kill_cc_and_nc.sh
new file mode 100755
index 0000000..096d7df
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/insert_after_recovery/kill_cc_and_nc.sh
@@ -0,0 +1 @@
+jps | awk '{if ($2 == "NCDriver" || $2 == "CCDriver") print $1;}' | xargs -n 1 kill -9
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/insert_after_recovery/stop_and_delete.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/insert_after_recovery/stop_and_delete.sh
new file mode 100755
index 0000000..d7deea3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/insert_after_recovery/stop_and_delete.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix delete -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/insert_after_recovery/stop_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/insert_after_recovery/stop_and_start.sh
new file mode 100755
index 0000000..7855938
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/insert_after_recovery/stop_and_start.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix start -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/load_after_recovery/create_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/load_after_recovery/create_and_start.sh
new file mode 100755
index 0000000..945f01d
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/load_after_recovery/create_and_start.sh
@@ -0,0 +1 @@
+$MANAGIX_HOME/bin/managix create -n nc1 -c $MANAGIX_HOME/clusters/local/local.xml;
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/load_after_recovery/kill_cc_and_nc.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/load_after_recovery/kill_cc_and_nc.sh
new file mode 100755
index 0000000..096d7df
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/load_after_recovery/kill_cc_and_nc.sh
@@ -0,0 +1 @@
+jps | awk '{if ($2 == "NCDriver" || $2 == "CCDriver") print $1;}' | xargs -n 1 kill -9
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/load_after_recovery/stop_and_delete.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/load_after_recovery/stop_and_delete.sh
new file mode 100755
index 0000000..d7deea3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/load_after_recovery/stop_and_delete.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix delete -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/load_after_recovery/stop_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/load_after_recovery/stop_and_start.sh
new file mode 100755
index 0000000..7855938
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/load_after_recovery/stop_and_start.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix start -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/secondary_index_recovery/create_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/secondary_index_recovery/create_and_start.sh
new file mode 100755
index 0000000..945f01d
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/secondary_index_recovery/create_and_start.sh
@@ -0,0 +1 @@
+$MANAGIX_HOME/bin/managix create -n nc1 -c $MANAGIX_HOME/clusters/local/local.xml;
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/secondary_index_recovery/kill_cc_and_nc.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/secondary_index_recovery/kill_cc_and_nc.sh
new file mode 100755
index 0000000..096d7df
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/secondary_index_recovery/kill_cc_and_nc.sh
@@ -0,0 +1 @@
+jps | awk '{if ($2 == "NCDriver" || $2 == "CCDriver") print $1;}' | xargs -n 1 kill -9
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/secondary_index_recovery/stop_and_delete.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/secondary_index_recovery/stop_and_delete.sh
new file mode 100755
index 0000000..d7deea3
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/secondary_index_recovery/stop_and_delete.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix delete -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/secondary_index_recovery/stop_and_start.sh b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/secondary_index_recovery/stop_and_start.sh
new file mode 100755
index 0000000..7855938
--- /dev/null
+++ b/asterix-installer/src/test/resources/transactionts/scripts/recovery_ddl/secondary_index_recovery/stop_and_start.sh
@@ -0,0 +1,3 @@
+$MANAGIX_HOME/bin/managix stop -n nc1;
+$MANAGIX_HOME/bin/managix start -n nc1;
+
diff --git a/asterix-installer/src/test/resources/transactionts/testsuite.xml b/asterix-installer/src/test/resources/transactionts/testsuite.xml
index 1991279..211564a 100644
--- a/asterix-installer/src/test/resources/transactionts/testsuite.xml
+++ b/asterix-installer/src/test/resources/transactionts/testsuite.xml
@@ -14,10 +14,90 @@
  !-->
 <test-suite xmlns="urn:xml.testframework.asterix.ics.uci.edu" ResultOffsetPath="results" QueryOffsetPath="queries" QueryFileExtension=".aql">
   <test-group name="recover_after_abort">
+  
     <test-case FilePath="recover_after_abort">
       <compilation-unit name="primary_index_only">
         <output-dir compare="Text">primary_index_only</output-dir>
       </compilation-unit>
     </test-case>
+
+    <test-case FilePath="recover_after_abort">
+      <compilation-unit name="primary_plus_default_secondary_index">
+        <output-dir compare="Text">primary_plus_default_secondary_index</output-dir>
+      </compilation-unit>
+    </test-case>
+     
+    <test-case FilePath="recover_after_abort">
+      <compilation-unit name="primary_plus_rtree_index">
+        <output-dir compare="Text">primary_plus_rtree_index</output-dir>
+      </compilation-unit>
+    </test-case>
+   
+    <test-case FilePath="recover_after_abort">
+      <compilation-unit name="primary_plus_keyword_secondary_index">
+        <output-dir compare="Text">primary_plus_keyword_secondary_index</output-dir>
+      </compilation-unit>
+    </test-case>
+      
+    <test-case FilePath="recover_after_abort">
+      <compilation-unit name="primary_plus_ngram_index">
+        <output-dir compare="Text">primary_plus_ngram_index</output-dir>
+      </compilation-unit>
+    </test-case>
+
+    <test-case FilePath="recover_after_abort">
+      <compilation-unit name="primary_plus_multiple_secondary_indices">
+        <output-dir compare="Text">primary_plus_multiple_secondary_indices</output-dir>
+      </compilation-unit>
+    </test-case>
+<!-- 
+    <test-case FilePath="recovery_ddl">
+      <compilation-unit name="dataverse_recovery">
+        <output-dir compare="Text">dataverse_recovery</output-dir>
+      </compilation-unit>
+    </test-case>
+
+    <test-case FilePath="recovery_ddl">
+      <compilation-unit name="datatype_recovery">
+        <output-dir compare="Text">datatype_recovery</output-dir>
+      </compilation-unit>
+    </test-case>
+    
+    <test-case FilePath="recovery_ddl">
+      <compilation-unit name="dataset_recovery">
+        <output-dir compare="Text">dataset_recovery</output-dir>
+      </compilation-unit>
+    </test-case>
+    
+    <test-case FilePath="recovery_ddl">
+      <compilation-unit name="secondary_index_recovery">
+        <output-dir compare="Text">secondary_index_recovery</output-dir>
+      </compilation-unit>
+    </test-case>
+  
+    <test-case FilePath="recovery_ddl">
+      <compilation-unit name="load_after_recovery">
+        <output-dir compare="Text">load_after_recovery</output-dir>
+      </compilation-unit>
+    </test-case>
+
+    <test-case FilePath="recovery_ddl">
+      <compilation-unit name="insert_after_recovery">
+        <output-dir compare="Text">insert_after_recovery</output-dir>
+      </compilation-unit>
+    </test-case>
+
+    <test-case FilePath="recovery_ddl">
+      <compilation-unit name="delete_after_recovery">
+        <output-dir compare="Text">delete_after_recovery</output-dir>
+      </compilation-unit>
+    </test-case>
+
+    <test-case FilePath="recovery_ddl">
+      <compilation-unit name="function_recovery">
+        <output-dir compare="Text">function_recovery</output-dir>
+      </compilation-unit>
+    </test-case>
+--> 
   </test-group>
 </test-suite>
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataManager.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataManager.java
index 67997e4..3bdf73a 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataManager.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/MetadataManager.java
@@ -22,6 +22,7 @@
 
 import edu.uci.ics.asterix.common.config.AsterixMetadataProperties;
 import edu.uci.ics.asterix.common.exceptions.ACIDException;
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.common.feeds.FeedConnectionId;
 import edu.uci.ics.asterix.common.functions.FunctionSignature;
 import edu.uci.ics.asterix.common.transactions.JobId;
@@ -42,6 +43,7 @@
 import edu.uci.ics.asterix.metadata.entities.Library;
 import edu.uci.ics.asterix.metadata.entities.Node;
 import edu.uci.ics.asterix.metadata.entities.NodeGroup;
+import edu.uci.ics.asterix.om.types.ARecordType;
 import edu.uci.ics.asterix.transaction.management.service.transaction.JobIdFactory;
 import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
 
@@ -81,7 +83,7 @@
     private static final int INITIAL_SLEEP_TIME = 64;
     private static final int RETRY_MULTIPLIER = 4;
     private static final int MAX_RETRY_COUNT = 6;
-    
+
     // Set in init().
     public static MetadataManager INSTANCE;
     private final MetadataCache cache = new MetadataCache();
@@ -382,7 +384,17 @@
         datatype = cache.getDatatype(dataverseName, datatypeName);
         if (datatype != null) {
             // Datatype is already in the cache, don't add it again.
-            return datatype;
+            try {
+                // Create a new Datatype wrapping a fresh ARecordType so that callers do not share
+                // the cached ARecordType's UTF8StringPointable comparator across threads.
+                // See issue 510.
+                ARecordType aRecType = (ARecordType) datatype.getDatatype();
+                return new Datatype(datatype.getDataverseName(), datatype.getDatatypeName(), new ARecordType(
+                        aRecType.getTypeName(), aRecType.getFieldNames(), aRecType.getFieldTypes(), aRecType.isOpen()),
+                        datatype.getIsAnonymous());
+            } catch (AsterixException e) {
+                throw new MetadataException(e);
+            }
         }
         try {
             datatype = metadataNode.getDatatype(ctx.getJobId(), dataverseName, datatypeName);
@@ -823,4 +835,4 @@
         return dataverseAdapters;
     }
 
-}
\ No newline at end of file
+}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java
index 4eff8ca..4da36d3 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/bootstrap/MetadataBootstrap.java
@@ -323,7 +323,8 @@
                 "edu.uci.ics.asterix.external.adapter.factory.CNNFeedAdapterFactory",
                 "edu.uci.ics.asterix.tools.external.data.RateControlledFileSystemBasedAdapterFactory",
                 "edu.uci.ics.asterix.tools.external.data.TwitterFirehoseFeedAdapterFactory",
-                "edu.uci.ics.asterix.tools.external.data.GenericSocketFeedAdapterFactory" };
+                "edu.uci.ics.asterix.tools.external.data.GenericSocketFeedAdapterFactory",
+                "edu.uci.ics.asterix.tools.external.data.SocketClientAdapterFactory" };
         DatasourceAdapter adapter;
         for (String adapterClassName : builtInAdapterClassNames) {
             adapter = getAdapter(adapterClassName);
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
index f598f9d..c6c1559 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/declared/AqlMetadataProvider.java
@@ -958,7 +958,9 @@
         // generate field permutations
         int[] fieldPermutation = new int[numKeys];
         int[] bloomFilterKeyFields = new int[secondaryKeys.size()];
+        int[] modificationCallbackPrimaryKeyFields = new int[primaryKeys.size()];
         int i = 0;
+        int j = 0;
         for (LogicalVariable varKey : secondaryKeys) {
             int idx = propagatedSchema.findVariable(varKey);
             fieldPermutation[i] = idx;
@@ -968,7 +970,9 @@
         for (LogicalVariable varKey : primaryKeys) {
             int idx = propagatedSchema.findVariable(varKey);
             fieldPermutation[i] = idx;
+            modificationCallbackPrimaryKeyFields[j] = i;
             i++;
+            j++;
         }
 
         Dataset dataset = findDataset(dataverseName, datasetName);
@@ -1018,16 +1022,9 @@
             // prepare callback
             JobId jobId = ((JobEventListenerFactory) spec.getJobletEventListenerFactory()).getJobId();
             int datasetId = dataset.getDatasetId();
-            int[] primaryKeyFields = new int[primaryKeys.size()];
-            i = 0;
-            for (LogicalVariable varKey : primaryKeys) {
-                int idx = propagatedSchema.findVariable(varKey);
-                primaryKeyFields[i] = idx;
-                i++;
-            }
             TransactionSubsystemProvider txnSubsystemProvider = new TransactionSubsystemProvider();
             SecondaryIndexModificationOperationCallbackFactory modificationCallbackFactory = new SecondaryIndexModificationOperationCallbackFactory(
-                    jobId, datasetId, primaryKeyFields, txnSubsystemProvider, indexOp, ResourceType.LSM_BTREE);
+                    jobId, datasetId, modificationCallbackPrimaryKeyFields, txnSubsystemProvider, indexOp, ResourceType.LSM_BTREE);
 
             Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(
                     dataset, mdTxnCtx);
@@ -1067,7 +1064,9 @@
         int numKeys = primaryKeys.size() + secondaryKeys.size();
         // generate field permutations
         int[] fieldPermutation = new int[numKeys];
+        int[] modificationCallbackPrimaryKeyFields = new int[primaryKeys.size()];
         int i = 0;
+        int j = 0;
         for (LogicalVariable varKey : secondaryKeys) {
             int idx = propagatedSchema.findVariable(varKey);
             fieldPermutation[i] = idx;
@@ -1076,7 +1075,9 @@
         for (LogicalVariable varKey : primaryKeys) {
             int idx = propagatedSchema.findVariable(varKey);
             fieldPermutation[i] = idx;
+            modificationCallbackPrimaryKeyFields[j] = i;
             i++;
+            j++;
         }
 
         boolean isPartitioned;
@@ -1147,16 +1148,9 @@
             // prepare callback
             JobId jobId = ((JobEventListenerFactory) spec.getJobletEventListenerFactory()).getJobId();
             int datasetId = dataset.getDatasetId();
-            int[] primaryKeyFields = new int[primaryKeys.size()];
-            i = 0;
-            for (LogicalVariable varKey : primaryKeys) {
-                int idx = propagatedSchema.findVariable(varKey);
-                primaryKeyFields[i] = idx;
-                i++;
-            }
             TransactionSubsystemProvider txnSubsystemProvider = new TransactionSubsystemProvider();
             SecondaryIndexModificationOperationCallbackFactory modificationCallbackFactory = new SecondaryIndexModificationOperationCallbackFactory(
-                    jobId, datasetId, primaryKeyFields, txnSubsystemProvider, indexOp, ResourceType.LSM_INVERTED_INDEX);
+                    jobId, datasetId, modificationCallbackPrimaryKeyFields, txnSubsystemProvider, indexOp, ResourceType.LSM_INVERTED_INDEX);
 
             Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(
                     dataset, mdTxnCtx);
@@ -1203,7 +1197,9 @@
             ITypeTraits[] typeTraits = new ITypeTraits[numKeys];
             IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[numKeys];
             int[] fieldPermutation = new int[numKeys];
+            int[] modificationCallbackPrimaryKeyFields = new int[primaryKeys.size()];
             int i = 0;
+            int j = 0;
 
             for (LogicalVariable varKey : secondaryKeys) {
                 int idx = propagatedSchema.findVariable(varKey);
@@ -1213,7 +1209,9 @@
             for (LogicalVariable varKey : primaryKeys) {
                 int idx = propagatedSchema.findVariable(varKey);
                 fieldPermutation[i] = idx;
+                modificationCallbackPrimaryKeyFields[j] = i;
                 i++;
+                j++;
             }
             IAType nestedKeyType = NonTaggedFormatUtil.getNestedSpatialType(spatialType.getTypeTag());
             IPrimitiveValueProviderFactory[] valueProviderFactories = new IPrimitiveValueProviderFactory[numSecondaryKeys];
@@ -1241,16 +1239,9 @@
             // prepare callback
             JobId jobId = ((JobEventListenerFactory) spec.getJobletEventListenerFactory()).getJobId();
             int datasetId = dataset.getDatasetId();
-            int[] primaryKeyFields = new int[numPrimaryKeys];
-            i = 0;
-            for (LogicalVariable varKey : primaryKeys) {
-                int idx = propagatedSchema.findVariable(varKey);
-                primaryKeyFields[i] = idx;
-                i++;
-            }
             TransactionSubsystemProvider txnSubsystemProvider = new TransactionSubsystemProvider();
             SecondaryIndexModificationOperationCallbackFactory modificationCallbackFactory = new SecondaryIndexModificationOperationCallbackFactory(
-                    jobId, datasetId, primaryKeyFields, txnSubsystemProvider, indexOp, ResourceType.LSM_RTREE);
+                    jobId, datasetId, modificationCallbackPrimaryKeyFields, txnSubsystemProvider, indexOp, ResourceType.LSM_RTREE);
 
             Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(
                     dataset, mdTxnCtx);
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
index bb1f60e..4bc0994 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/entitytupletranslators/DatasetTupleTranslator.java
@@ -332,4 +332,4 @@
         }
     }
 
-}
\ No newline at end of file
+}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/AbstractFeedDatasourceAdapter.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/AbstractFeedDatasourceAdapter.java
deleted file mode 100644
index 2dc7c56..0000000
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/AbstractFeedDatasourceAdapter.java
+++ /dev/null
@@ -1,17 +0,0 @@
-package edu.uci.ics.asterix.metadata.feeds;
-
-public abstract class AbstractFeedDatasourceAdapter implements IDatasourceAdapter {
-
-    private static final long serialVersionUID = 1L;
-
-    protected FeedPolicyEnforcer policyEnforcer;
-
-    public FeedPolicyEnforcer getPolicyEnforcer() {
-        return policyEnforcer;
-    }
-
-    public void setFeedPolicyEnforcer(FeedPolicyEnforcer policyEnforcer) {
-        this.policyEnforcer = policyEnforcer;
-    }
-
-}
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/AdapterRuntimeManager.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/AdapterRuntimeManager.java
index 1380d9e..b9a5e73 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/AdapterRuntimeManager.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/AdapterRuntimeManager.java
@@ -138,6 +138,7 @@
                 adapter.start(partition, writer);
                 runtimeManager.setState(State.FINISHED_INGESTION);
             } catch (Exception e) {
+                e.printStackTrace();
                 if (LOGGER.isLoggable(Level.SEVERE)) {
                     LOGGER.severe("Exception during feed ingestion " + e.getMessage());
                 }
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/BuiltinFeedPolicies.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/BuiltinFeedPolicies.java
index 4520f71..3bd73a3 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/BuiltinFeedPolicies.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/BuiltinFeedPolicies.java
@@ -35,7 +35,7 @@
     public static final FeedPolicy[] policies = new FeedPolicy[] { BRITTLE, BASIC, BASIC_MONITORED,
             FAULT_TOLERANT_BASIC_MONITORED, ELASTIC };
 
-    public static final FeedPolicy DEFAULT_POLICY = FAULT_TOLERANT_BASIC_MONITORED;
+    public static final FeedPolicy DEFAULT_POLICY = BASIC;
 
     public static final String CONFIG_FEED_POLICY_KEY = "policy";
 
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedFrameWriter.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedFrameWriter.java
index 4f43e92..899da77 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedFrameWriter.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedFrameWriter.java
@@ -332,10 +332,12 @@
     @Override
     public void fail() throws HyracksDataException {
         writer.fail();
-        if (healthMonitor != null && !healthMonitor.feedRuntimeType.equals(FeedRuntimeType.INGESTION)) {
-            healthMonitor.deactivate();
-        } else {
-            healthMonitor.reset();
+        if (healthMonitor != null) {
+            if (!healthMonitor.feedRuntimeType.equals(FeedRuntimeType.INGESTION)) {
+                healthMonitor.deactivate();
+            } else {
+                healthMonitor.reset();
+            }
         }
     }
 
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedIntakeOperatorNodePushable.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedIntakeOperatorNodePushable.java
index 3dbd2e2..8f9adeb 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedIntakeOperatorNodePushable.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedIntakeOperatorNodePushable.java
@@ -27,6 +27,7 @@
 import edu.uci.ics.asterix.common.feeds.FeedRuntimeManager;
 import edu.uci.ics.asterix.common.feeds.IFeedManager;
 import edu.uci.ics.asterix.metadata.feeds.AdapterRuntimeManager.State;
+import edu.uci.ics.asterix.metadata.feeds.IFeedAdapter.DataExchangeMode;
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
@@ -79,8 +80,8 @@
                 adapterRuntimeMgr = new AdapterRuntimeManager(feedId, adapter, feedFrameWriter, partition, inbox,
                         feedManager);
 
-                if (adapter instanceof AbstractFeedDatasourceAdapter) {
-                    ((AbstractFeedDatasourceAdapter) adapter).setFeedPolicyEnforcer(policyEnforcer);
+                if (adapter.getDataExchangeMode().equals(DataExchangeMode.PULL) && adapter instanceof IPullBasedFeedAdapter) {
+                    ((IPullBasedFeedAdapter) adapter).setFeedPolicyEnforcer(policyEnforcer);
                 }
                 if (LOGGER.isLoggable(Level.INFO)) {
                     LOGGER.info("Beginning new feed:" + feedId);
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedUtil.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedUtil.java
index 31b19dd..8f140d7 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedUtil.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/FeedUtil.java
@@ -269,7 +269,12 @@
                     adapterOutputType = ((ITypedAdapterFactory) adapterFactory).getAdapterOutputType();
                     break;
                 case GENERIC:
-                    String outputTypeName = configuration.get("output-type-name");
+                    String outputTypeName = configuration.get(IGenericAdapterFactory.KEY_TYPE_NAME);
+                    if (outputTypeName == null) {
+                        throw new IllegalArgumentException(
+                                "You must specify the datatype associated with the incoming data. Datatype is specified by the "
+                                        + IGenericAdapterFactory.KEY_TYPE_NAME + " configuration parameter");
+                    }
                     adapterOutputType = (ARecordType) MetadataManager.INSTANCE.getDatatype(mdTxnCtx,
                             feed.getDataverseName(), outputTypeName).getDatatype();
                     ((IGenericAdapterFactory) adapterFactory).configure(configuration, (ARecordType) adapterOutputType);
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IFeedAdapter.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IFeedAdapter.java
index f525420..55abd73 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IFeedAdapter.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IFeedAdapter.java
@@ -20,6 +20,16 @@
  */
 public interface IFeedAdapter extends IDatasourceAdapter {
 
+    public enum DataExchangeMode {
+        PULL,
+        PUSH
+    }
+
+    /**
+     * @return the data exchange mode (PULL or PUSH) used by this adapter
+     */
+    public DataExchangeMode getDataExchangeMode();
+
     /**
      * Discontinue the ingestion of data and end the feed.
      * 
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IGenericAdapterFactory.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IGenericAdapterFactory.java
index 54613d0..16c3c80 100644
--- a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IGenericAdapterFactory.java
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IGenericAdapterFactory.java
@@ -20,6 +20,8 @@
 
 public interface IGenericAdapterFactory extends IAdapterFactory {
 
+    public static final String KEY_TYPE_NAME = "type-name";
+
     public void configure(Map<String, String> configuration, ARecordType outputType) throws Exception;
 
 }
diff --git a/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IPullBasedFeedAdapter.java b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IPullBasedFeedAdapter.java
new file mode 100644
index 0000000..50641b0
--- /dev/null
+++ b/asterix-metadata/src/main/java/edu/uci/ics/asterix/metadata/feeds/IPullBasedFeedAdapter.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.metadata.feeds;
+
+public interface IPullBasedFeedAdapter extends IFeedAdapter {
+
+    /**
+     * @return the FeedPolicyEnforcer associated with this pull-based adapter
+     */
+    public FeedPolicyEnforcer getPolicyEnforcer();
+
+    /**
+     * @param feedPolicyEnforcer the policy enforcer to associate with this adapter
+     */
+    public void setFeedPolicyEnforcer(FeedPolicyEnforcer feedPolicyEnforcer);
+}
diff --git a/asterix-test-framework/.gitignore b/asterix-test-framework/.gitignore
index ea8c4bf..19f2e00 100644
--- a/asterix-test-framework/.gitignore
+++ b/asterix-test-framework/.gitignore
@@ -1 +1,2 @@
 /target
+/target
diff --git a/asterix-tools/pom.xml b/asterix-tools/pom.xml
index 861dd79..3e9400f 100644
--- a/asterix-tools/pom.xml
+++ b/asterix-tools/pom.xml
@@ -154,6 +154,12 @@
 			<scope>compile</scope>
 		</dependency>
 		<dependency>
+			<groupId>edu.uci.ics.asterix</groupId>
+			<artifactId>asterix-metadata</artifactId>
+			<version>0.8.1-SNAPSHOT</version>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
 			<groupId>org.apache.httpcomponents</groupId>
 			<artifactId>httpclient</artifactId>
 			<version>4.2.2</version>
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/DataGenerator.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/DataGenerator.java
index 7c18670..ce22887 100644
--- a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/DataGenerator.java
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/DataGenerator.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2009-2013 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
- * 
+ *
  *     http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -14,12 +14,6 @@
  */
 package edu.uci.ics.asterix.tools.external.data;
 
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.File;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
 import java.nio.CharBuffer;
 import java.util.ArrayList;
 import java.util.HashSet;
@@ -27,228 +21,53 @@
 import java.util.List;
 import java.util.Random;
 
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-import javax.xml.parsers.ParserConfigurationException;
-import javax.xml.transform.OutputKeys;
-import javax.xml.transform.Transformer;
-import javax.xml.transform.TransformerException;
-import javax.xml.transform.TransformerFactory;
-import javax.xml.transform.dom.DOMSource;
-import javax.xml.transform.stream.StreamResult;
-
-import org.w3c.dom.Document;
-import org.w3c.dom.Element;
-import org.w3c.dom.Node;
-import org.w3c.dom.NodeList;
-
 public class DataGenerator {
 
     private RandomDateGenerator randDateGen;
+
     private RandomNameGenerator randNameGen;
-    private RandomEmploymentGenerator randEmpGen;
+
     private RandomMessageGenerator randMessageGen;
+
     private RandomLocationGenerator randLocationGen;
 
-    private DistributionHandler fbDistHandler;
-    private DistributionHandler twDistHandler;
-
-    private int totalFbMessages;
-    private int numFbOnlyUsers;
-    private int totalTwMessages;
-    private int numTwOnlyUsers;
-
-    private int numCommonUsers;
-
-    private int fbUserId;
-    private int twUserId;
-
-    private int fbMessageId;
-    private int twMessageId;
-
     private Random random = new Random();
 
-    private String commonUserFbSuffix = "_fb";
-    private String commonUserTwSuffix = "_tw";
-
-    private String outputDir;
-
-    private PartitionConfiguration partition;
-
-    private FacebookUser fbUser = new FacebookUser();
     private TwitterUser twUser = new TwitterUser();
 
-    private FacebookMessage fbMessage = new FacebookMessage();
     private TweetMessage twMessage = new TweetMessage();
 
-    private int duration;
-
-    public DataGenerator(String[] args) throws Exception {
-        String controllerInstallDir = args[0];
-        String partitionConfXML = controllerInstallDir + "/output/partition-conf.xml";
-        String partitionName = args[1];
-        partition = XMLUtil.getPartitionConfiguration(partitionConfXML, partitionName);
-
-        // 1
-        randDateGen = new RandomDateGenerator(new Date(1, 1, 2005), new Date(8, 20, 2012));
-
-        String firstNameFile = controllerInstallDir + "/metadata/firstNames.txt";
-        String lastNameFile = controllerInstallDir + "/metadata/lastNames.txt";
-        String vendorFile = controllerInstallDir + "/metadata/vendors.txt";
-        String jargonFile = controllerInstallDir + "/metadata/jargon.txt";
-        String orgList = controllerInstallDir + "/metadata/org_list.txt";
-
-        randNameGen = new RandomNameGenerator(firstNameFile, lastNameFile);
-        randEmpGen = new RandomEmploymentGenerator(90, new Date(1, 1, 2000), new Date(8, 20, 2012), orgList);
-        randLocationGen = new RandomLocationGenerator(24, 49, 66, 98);
-        randMessageGen = new RandomMessageGenerator(vendorFile, jargonFile);
-
-        totalFbMessages = partition.getTargetPartition().getFbMessageIdMax()
-                - partition.getTargetPartition().getFbMessageIdMin() + 1;
-        numFbOnlyUsers = (partition.getTargetPartition().getFbUserKeyMax()
-                - partition.getTargetPartition().getFbUserKeyMin() + 1)
-                - partition.getTargetPartition().getCommonUsers();
-
-        totalTwMessages = partition.getTargetPartition().getTwMessageIdMax()
-                - partition.getTargetPartition().getTwMessageIdMin() + 1;
-        numTwOnlyUsers = (partition.getTargetPartition().getTwUserKeyMax()
-                - partition.getTargetPartition().getTwUserKeyMin() + 1)
-                - partition.getTargetPartition().getCommonUsers();
-
-        numCommonUsers = partition.getTargetPartition().getCommonUsers();
-        fbDistHandler = new DistributionHandler(totalFbMessages, 0.5, numFbOnlyUsers + numCommonUsers);
-        twDistHandler = new DistributionHandler(totalTwMessages, 0.5, numTwOnlyUsers + numCommonUsers);
-
-        fbUserId = partition.getTargetPartition().getFbUserKeyMin();
-        twUserId = partition.getTargetPartition().getTwUserKeyMin();
-
-        fbMessageId = partition.getTargetPartition().getFbMessageIdMin();
-        twMessageId = partition.getTargetPartition().getTwMessageIdMin();
-
-        outputDir = partition.getSourcePartition().getPath();
-    }
-
     public DataGenerator(InitializationInfo info) {
         initialize(info);
     }
 
-    private void generateFacebookOnlyUsers(int numFacebookUsers) throws IOException {
-        FileAppender appender = FileUtil.getFileAppender(outputDir + "/" + "fb_users.adm", true, true);
-        FileAppender messageAppender = FileUtil.getFileAppender(outputDir + "/" + "fb_message.adm", true, true);
-
-        for (int i = 0; i < numFacebookUsers; i++) {
-            getFacebookUser(null);
-            appender.appendToFile(fbUser.toString());
-            generateFacebookMessages(fbUser, messageAppender, -1);
-        }
-        appender.close();
-        messageAppender.close();
-    }
-
-    private void generateTwitterOnlyUsers(int numTwitterUsers) throws IOException {
-        FileAppender appender = FileUtil.getFileAppender(outputDir + "/" + "tw_users.adm", true, true);
-        FileAppender messageAppender = FileUtil.getFileAppender(outputDir + "/" + "tw_message.adm", true, true);
-
-        for (int i = 0; i < numTwitterUsers; i++) {
-            getTwitterUser(null);
-            appender.appendToFile(twUser.toString());
-            generateTwitterMessages(twUser, messageAppender, -1);
-        }
-        appender.close();
-        messageAppender.close();
-    }
-
-    private void generateCommonUsers() throws IOException {
-        FileAppender fbAppender = FileUtil.getFileAppender(outputDir + "/" + "fb_users.adm", true, false);
-        FileAppender twAppender = FileUtil.getFileAppender(outputDir + "/" + "tw_users.adm", true, false);
-        FileAppender fbMessageAppender = FileUtil.getFileAppender(outputDir + "/" + "fb_message.adm", true, false);
-        FileAppender twMessageAppender = FileUtil.getFileAppender(outputDir + "/" + "tw_message.adm", true, false);
-
-        for (int i = 0; i < numCommonUsers; i++) {
-            getFacebookUser(commonUserFbSuffix);
-            fbAppender.appendToFile(fbUser.toString());
-            generateFacebookMessages(fbUser, fbMessageAppender, -1);
-
-            getCorrespondingTwitterUser(fbUser);
-            twAppender.appendToFile(twUser.toString());
-            generateTwitterMessages(twUser, twMessageAppender, -1);
-        }
-
-        fbAppender.close();
-        twAppender.close();
-        fbMessageAppender.close();
-        twMessageAppender.close();
-    }
-
-    private void generateFacebookMessages(FacebookUser user, FileAppender appender, int numMsg) throws IOException {
-        Message message;
-        int numMessages = 0;
-        if (numMsg == -1) {
-            numMessages = fbDistHandler
-                    .getFromDistribution(fbUserId - partition.getTargetPartition().getFbUserKeyMin());
-        }
-        for (int i = 0; i < numMessages; i++) {
-            message = randMessageGen.getNextRandomMessage();
-            Point location = randLocationGen.getRandomPoint();
-            fbMessage.reset(fbMessageId++, user.getId(), random.nextInt(totalFbMessages + 1), location, message);
-            appender.appendToFile(fbMessage.toString());
-        }
-    }
-
-    private void generateTwitterMessages(TwitterUser user, FileAppender appender, int numMsg) throws IOException {
-        Message message;
-        int numMessages = 0;
-        if (numMsg == -1) {
-            numMessages = twDistHandler
-                    .getFromDistribution(twUserId - partition.getTargetPartition().getTwUserKeyMin());
-        }
-
-        for (int i = 0; i < numMessages; i++) {
-            message = randMessageGen.getNextRandomMessage();
-            Point location = randLocationGen.getRandomPoint();
-            DateTime sendTime = randDateGen.getNextRandomDatetime();
-            twMessage.reset(twMessageId, user, location, sendTime, message.getReferredTopics(), message);
-            twMessageId++;
-            appender.appendToFile(twMessage.toString());
-        }
-    }
-
-    public Iterator<TweetMessage> getTwitterMessageIterator(int partition, byte seed) {
-        return new TweetMessageIterator(duration, partition, seed);
-    }
-
     public class TweetMessageIterator implements Iterator<TweetMessage> {
 
         private final int duration;
-        private long startTime = 0;
         private final GULongIDGenerator idGen;
+        private long startTime = 0;
 
-        public TweetMessageIterator(int duration, int partition, byte seed) {
+        public TweetMessageIterator(int duration, GULongIDGenerator idGen) {
             this.duration = duration;
-            this.idGen = new GULongIDGenerator(partition, seed);
+            this.idGen = idGen;
+            this.startTime = System.currentTimeMillis();
         }
 
         @Override
         public boolean hasNext() {
-            if (startTime == 0) {
-                startTime = System.currentTimeMillis();
-            }
-            return System.currentTimeMillis() - startTime < duration * 1000;
+            return System.currentTimeMillis() - startTime <= duration * 1000;
         }
 
         @Override
         public TweetMessage next() {
+            TweetMessage msg = null;
             getTwitterUser(null);
             Message message = randMessageGen.getNextRandomMessage();
             Point location = randLocationGen.getRandomPoint();
             DateTime sendTime = randDateGen.getNextRandomDatetime();
             twMessage.reset(idGen.getNextULong(), twUser, location, sendTime, message.getReferredTopics(), message);
-            twMessageId++;
-            if (twUserId > numTwOnlyUsers) {
-                twUserId = 1;
-            }
-            return twMessage;
-
+            msg = twMessage;
+            return msg;
         }
 
         @Override
@@ -266,74 +85,13 @@
         public String[] vendors = DataGenerator.vendors;
         public String[] jargon = DataGenerator.jargon;
         public String[] org_list = DataGenerator.org_list;
-        public int percentEmployed = 90;
-        public Date employmentStartDate = new Date(1, 1, 2000);
-        public Date employmentEndDate = new Date(31, 12, 2012);
-        public int totalFbMessages;
-        public int numFbOnlyUsers;
-        public int totalTwMessages;
-        public int numTwOnlyUsers = 5000;
-        public int numCommonUsers;
-        public int fbUserIdMin;
-        public int fbMessageIdMin;
-        public int twUserIdMin;
-        public int twMessageIdMin;
-        public int timeDurationInSecs = 60;
-
     }
 
     public void initialize(InitializationInfo info) {
         randDateGen = new RandomDateGenerator(info.startDate, info.endDate);
         randNameGen = new RandomNameGenerator(info.firstNames, info.lastNames);
-        randEmpGen = new RandomEmploymentGenerator(info.percentEmployed, info.employmentStartDate,
-                info.employmentEndDate, info.org_list);
         randLocationGen = new RandomLocationGenerator(24, 49, 66, 98);
         randMessageGen = new RandomMessageGenerator(info.vendors, info.jargon);
-        fbDistHandler = new DistributionHandler(info.totalFbMessages, 0.5, info.numFbOnlyUsers + info.numCommonUsers);
-        twDistHandler = new DistributionHandler(info.totalTwMessages, 0.5, info.numTwOnlyUsers + info.numCommonUsers);
-        fbUserId = info.fbUserIdMin;
-        twUserId = info.twUserIdMin;
-
-        fbMessageId = info.fbMessageIdMin;
-        twMessageId = info.twMessageIdMin;
-        duration = info.timeDurationInSecs;
-    }
-
-    public static void main(String args[]) throws Exception {
-        if (args.length < 2) {
-            printUsage();
-            System.exit(1);
-        }
-
-        DataGenerator dataGenerator = new DataGenerator(args);
-        dataGenerator.generateData();
-    }
-
-    public static void printUsage() {
-        System.out.println(" Error: Invalid number of arguments ");
-        System.out.println(" Usage :" + " DataGenerator <path to configuration file> <partition name> ");
-    }
-
-    public void generateData() throws IOException {
-        generateFacebookOnlyUsers(numFbOnlyUsers);
-        generateTwitterOnlyUsers(numTwOnlyUsers);
-        generateCommonUsers();
-        System.out.println("Partition :" + partition.getTargetPartition().getName() + " finished");
-    }
-
-    public void getFacebookUser(String usernameSuffix) {
-        String suggestedName = randNameGen.getRandomName();
-        String[] nameComponents = suggestedName.split(" ");
-        String name = nameComponents[0] + nameComponents[1];
-        if (usernameSuffix != null) {
-            name = name + usernameSuffix;
-        }
-        String alias = nameComponents[0];
-        String userSince = randDateGen.getNextRandomDatetime().toString();
-        int numFriends = random.nextInt(25);
-        int[] friendIds = RandomUtil.getKFromN(numFriends, (numFbOnlyUsers + numCommonUsers));
-        Employment emp = randEmpGen.getRandomEmployment();
-        fbUser.reset(fbUserId++, alias, name, userSince, friendIds, emp);
     }
 
     public void getTwitterUser(String usernameSuffix) {
@@ -348,17 +106,6 @@
         int statusesCount = random.nextInt(500); // draw from Zipfian
         int followersCount = random.nextInt((int) (200));
         twUser.reset(screenName, numFriends, statusesCount, name, followersCount);
-        twUserId++;
-    }
-
-    public void getCorrespondingTwitterUser(FacebookUser fbUser) {
-        String screenName = fbUser.getName().substring(0, fbUser.getName().lastIndexOf(commonUserFbSuffix))
-                + commonUserTwSuffix;
-        String name = screenName.split(" ")[0];
-        int numFriends = random.nextInt((int) ((numTwOnlyUsers + numCommonUsers)));
-        int statusesCount = random.nextInt(500); // draw from Zipfian
-        int followersCount = random.nextInt((int) (numTwOnlyUsers + numCommonUsers));
-        twUser.reset(screenName, numFriends, statusesCount, name, followersCount);
     }
 
     public static class RandomDateGenerator {
@@ -374,10 +121,10 @@
         public RandomDateGenerator(Date startDate, Date endDate) {
             this.startDate = startDate;
             this.endDate = endDate;
-            yearDifference = endDate.getYear() - startDate.getYear() + 1;
-            workingDate = new Date();
-            recentDate = new Date();
-            dateTime = new DateTime();
+            this.yearDifference = endDate.getYear() - startDate.getYear() + 1;
+            this.workingDate = new Date();
+            this.recentDate = new Date();
+            this.dateTime = new DateTime();
         }
 
         public Date getStartDate() {
@@ -427,15 +174,6 @@
             return recentDate;
         }
 
-        public static void main(String args[]) throws Exception {
-            RandomDateGenerator dgen = new RandomDateGenerator(new Date(1, 1, 2005), new Date(8, 20, 2012));
-            while (true) {
-                Date nextDate = dgen.getNextRandomDate();
-                if (nextDate.getDay() == 0) {
-                    throw new Exception("invalid date " + nextDate);
-                }
-            }
-        }
     }
 
     public static class DateTime extends Date {
@@ -443,15 +181,15 @@
         private String hour = "10";
         private String min = "10";
         private String sec = "00";
-        private long chrononTime;
 
         public DateTime(int month, int day, int year, String hour, String min, String sec) {
             super(month, day, year);
             this.hour = hour;
             this.min = min;
             this.sec = sec;
-            chrononTime = new java.util.Date(year, month, day, Integer.parseInt(hour), Integer.parseInt(min),
-                    Integer.parseInt(sec)).getTime();
+        }
+
+        public DateTime() {
         }
 
         public void reset(int month, int day, int year, String hour, String min, String sec) {
@@ -461,11 +199,6 @@
             this.hour = hour;
             this.min = min;
             this.sec = sec;
-            chrononTime = new java.util.Date(year, month, day, Integer.parseInt(hour), Integer.parseInt(min),
-                    Integer.parseInt(sec)).getTime();
-        }
-
-        public DateTime() {
         }
 
         public DateTime(Date date) {
@@ -483,22 +216,6 @@
             this.sec = (sec < 10) ? "0" : "" + sec;
         }
 
-        public long getChrononTime() {
-            return chrononTime;
-        }
-
-        public String getHour() {
-            return hour;
-        }
-
-        public String getMin() {
-            return min;
-        }
-
-        public String getSec() {
-            return sec;
-        }
-
         public String toString() {
             StringBuilder builder = new StringBuilder();
             builder.append("datetime");
@@ -532,10 +249,6 @@
             length = 0;
         }
 
-        public char[] getMessage() {
-            return message;
-        }
-
         public List<String> getReferredTopics() {
             return referredTopics;
         }
@@ -598,11 +311,6 @@
 
         private final String[] connectors = new String[] { "_", "#", "$", "@" };
 
-        public RandomNameGenerator(String firstNameFilePath, String lastNameFilePath) throws IOException {
-            firstNames = FileUtil.listyFile(new File(firstNameFilePath)).toArray(new String[] {});
-            lastNames = FileUtil.listyFile(new File(lastNameFilePath)).toArray(new String[] {});
-        }
-
         public RandomNameGenerator(String[] firstNames, String[] lastNames) {
             this.firstNames = firstNames;
             this.lastNames = lastNames;
@@ -631,12 +339,6 @@
 
         private final MessageTemplate messageTemplate;
 
-        public RandomMessageGenerator(String vendorFilePath, String jargonFilePath) throws IOException {
-            List<String> vendors = FileUtil.listyFile(new File(vendorFilePath));
-            List<String> jargon = FileUtil.listyFile(new File(jargonFilePath));
-            this.messageTemplate = new MessageTemplate(vendors, jargon);
-        }
-
         public RandomMessageGenerator(String[] vendors, String[] jargon) {
             List<String> vendorList = new ArrayList<String>();
             for (String v : vendors) {
@@ -737,106 +439,15 @@
         }
     }
 
-    public static class FileUtil {
-
-        public static List<String> listyFile(File file) throws IOException {
-            BufferedReader reader = new BufferedReader(new FileReader(file));
-            String line;
-            List<String> list = new ArrayList<String>();
-            while (true) {
-                line = reader.readLine();
-                if (line == null) {
-                    break;
-                }
-                list.add(line);
-            }
-            reader.close();
-            return list;
-        }
-
-        public static FileAppender getFileAppender(String filePath, boolean createIfNotExists, boolean overwrite)
-                throws IOException {
-            return new FileAppender(filePath, createIfNotExists, overwrite);
-        }
-    }
-
-    public static class FileAppender {
-
-        private final BufferedWriter writer;
-
-        public FileAppender(String filePath, boolean createIfNotExists, boolean overwrite) throws IOException {
-            File file = new File(filePath);
-            if (!file.exists()) {
-                if (createIfNotExists) {
-                    new File(file.getParent()).mkdirs();
-                } else {
-                    throw new IOException("path " + filePath + " does not exists");
-                }
-            }
-            this.writer = new BufferedWriter(new FileWriter(file, !overwrite));
-        }
-
-        public void appendToFile(String content) throws IOException {
-            writer.append(content);
-            writer.append("\n");
-        }
-
-        public void close() throws IOException {
-            writer.close();
-        }
-    }
-
-    public static class RandomEmploymentGenerator {
-
-        private final int percentEmployed;
-        private final Random random = new Random();
-        private final RandomDateGenerator randDateGen;
-        private final List<String> organizations;
-        private Employment emp;
-
-        public RandomEmploymentGenerator(int percentEmployed, Date beginEmpDate, Date endEmpDate, String orgListPath)
-                throws IOException {
-            this.percentEmployed = percentEmployed;
-            this.randDateGen = new RandomDateGenerator(beginEmpDate, endEmpDate);
-            organizations = FileUtil.listyFile(new File(orgListPath));
-            emp = new Employment();
-        }
-
-        public RandomEmploymentGenerator(int percentEmployed, Date beginEmpDate, Date endEmpDate, String[] orgList) {
-            this.percentEmployed = percentEmployed;
-            this.randDateGen = new RandomDateGenerator(beginEmpDate, endEmpDate);
-            organizations = new ArrayList<String>();
-            for (String org : orgList) {
-                organizations.add(org);
-            }
-            emp = new Employment();
-        }
-
-        public Employment getRandomEmployment() {
-            int empployed = random.nextInt(100) + 1;
-            boolean isEmployed = false;
-            if (empployed <= percentEmployed) {
-                isEmployed = true;
-            }
-            Date startDate = randDateGen.getNextRandomDate();
-            Date endDate = null;
-            if (!isEmployed) {
-                endDate = randDateGen.getNextRecentDate(startDate);
-            }
-            String org = organizations.get(random.nextInt(organizations.size()));
-            emp.reset(org, startDate, endDate);
-            return emp;
-        }
-    }
-
     public static class RandomLocationGenerator {
 
+        private Random random = new Random();
+
         private final int beginLat;
         private final int endLat;
         private final int beginLong;
         private final int endLong;
 
-        private Random random = new Random();
         private Point point;
 
         public RandomLocationGenerator(int beginLat, int endLat, int beginLong, int endLong) {
@@ -862,417 +473,6 @@
 
     }
 
-    public static class PartitionConfiguration {
-
-        private final TargetPartition targetPartition;
-        private final SourcePartition sourcePartition;
-
-        public PartitionConfiguration(SourcePartition sourcePartition, TargetPartition targetPartition) {
-            this.sourcePartition = sourcePartition;
-            this.targetPartition = targetPartition;
-        }
-
-        public TargetPartition getTargetPartition() {
-            return targetPartition;
-        }
-
-        public SourcePartition getSourcePartition() {
-            return sourcePartition;
-        }
-
-    }
-
-    public static class SourcePartition {
-
-        private final String name;
-        private final String host;
-        private final String path;
-
-        public SourcePartition(String name, String host, String path) {
-            this.name = name;
-            this.host = host;
-            this.path = path;
-        }
-
-        public String getName() {
-            return name;
-        }
-
-        public String getHost() {
-            return host;
-        }
-
-        public String getPath() {
-            return path;
-        }
-    }
-
-    public static class TargetPartition {
-        private final String name;
-        private final String host;
-        private final String path;
-        private final int fbUserKeyMin;
-        private final int fbUserKeyMax;
-        private final int twUserKeyMin;
-        private final int twUserKeyMax;
-        private final int fbMessageIdMin;
-        private final int fbMessageIdMax;
-        private final int twMessageIdMin;
-        private final int twMessageIdMax;
-        private final int commonUsers;
-
-        public TargetPartition(String partitionName, String host, String path, int fbUserKeyMin, int fbUserKeyMax,
-                int twUserKeyMin, int twUserKeyMax, int fbMessageIdMin, int fbMessageIdMax, int twMessageIdMin,
-                int twMessageIdMax, int commonUsers) {
-            this.name = partitionName;
-            this.host = host;
-            this.path = path;
-            this.fbUserKeyMin = fbUserKeyMin;
-            this.fbUserKeyMax = fbUserKeyMax;
-            this.twUserKeyMin = twUserKeyMin;
-            this.twUserKeyMax = twUserKeyMax;
-            this.twMessageIdMin = twMessageIdMin;
-            this.twMessageIdMax = twMessageIdMax;
-            this.fbMessageIdMin = fbMessageIdMin;
-            this.fbMessageIdMax = fbMessageIdMax;
-            this.commonUsers = commonUsers;
-        }
-
-        public String toString() {
-            StringBuilder builder = new StringBuilder();
-            builder.append(name);
-            builder.append(" ");
-            builder.append(host);
-            builder.append("\n");
-            builder.append(path);
-            builder.append("\n");
-            builder.append("fbUser:key:min");
-            builder.append(fbUserKeyMin);
-
-            builder.append("\n");
-            builder.append("fbUser:key:max");
-            builder.append(fbUserKeyMax);
-
-            builder.append("\n");
-            builder.append("twUser:key:min");
-            builder.append(twUserKeyMin);
-
-            builder.append("\n");
-            builder.append("twUser:key:max");
-            builder.append(twUserKeyMax);
-
-            builder.append("\n");
-            builder.append("fbMessage:key:min");
-            builder.append(fbMessageIdMin);
-
-            builder.append("\n");
-            builder.append("fbMessage:key:max");
-            builder.append(fbMessageIdMax);
-
-            builder.append("\n");
-            builder.append("twMessage:key:min");
-            builder.append(twMessageIdMin);
-
-            builder.append("\n");
-            builder.append("twMessage:key:max");
-            builder.append(twMessageIdMax);
-
-            builder.append("\n");
-            builder.append("twMessage:key:max");
-            builder.append(twMessageIdMax);
-
-            builder.append("\n");
-            builder.append("commonUsers");
-            builder.append(commonUsers);
-
-            return new String(builder);
-        }
-
-        public String getName() {
-            return name;
-        }
-
-        public String getHost() {
-            return host;
-        }
-
-        public int getFbUserKeyMin() {
-            return fbUserKeyMin;
-        }
-
-        public int getFbUserKeyMax() {
-            return fbUserKeyMax;
-        }
-
-        public int getTwUserKeyMin() {
-            return twUserKeyMin;
-        }
-
-        public int getTwUserKeyMax() {
-            return twUserKeyMax;
-        }
-
-        public int getFbMessageIdMin() {
-            return fbMessageIdMin;
-        }
-
-        public int getFbMessageIdMax() {
-            return fbMessageIdMax;
-        }
-
-        public int getTwMessageIdMin() {
-            return twMessageIdMin;
-        }
-
-        public int getTwMessageIdMax() {
-            return twMessageIdMax;
-        }
-
-        public int getCommonUsers() {
-            return commonUsers;
-        }
-
-        public String getPath() {
-            return path;
-        }
-    }
-
-    public static class Employment {
-
-        private String organization;
-        private Date startDate;
-        private Date endDate;
-
-        public Employment(String organization, Date startDate, Date endDate) {
-            this.organization = organization;
-            this.startDate = startDate;
-            this.endDate = endDate;
-        }
-
-        public Employment() {
-        }
-
-        public void reset(String organization, Date startDate, Date endDate) {
-            this.organization = organization;
-            this.startDate = startDate;
-            this.endDate = endDate;
-        }
-
-        public String getOrganization() {
-            return organization;
-        }
-
-        public Date getStartDate() {
-            return startDate;
-        }
-
-        public Date getEndDate() {
-            return endDate;
-        }
-
-        public String toString() {
-            StringBuilder builder = new StringBuilder("");
-            builder.append("{");
-            builder.append("\"organization-name\":");
-            builder.append("\"" + organization + "\"");
-            builder.append(",");
-            builder.append("\"start-date\":");
-            builder.append(startDate);
-            if (endDate != null) {
-                builder.append(",");
-                builder.append("\"end-date\":");
-                builder.append(endDate);
-            }
-            builder.append("}");
-            return new String(builder);
-        }
-
-    }
-
-    public static class FacebookMessage {
-
-        private int messageId;
-        private int authorId;
-        private int inResponseTo;
-        private Point senderLocation;
-        private Message message;
-
-        public int getMessageId() {
-            return messageId;
-        }
-
-        public int getAuthorID() {
-            return authorId;
-        }
-
-        public Point getSenderLocation() {
-            return senderLocation;
-        }
-
-        public Message getMessage() {
-            return message;
-        }
-
-        public int getInResponseTo() {
-            return inResponseTo;
-        }
-
-        public FacebookMessage() {
-
-        }
-
-        public FacebookMessage(int messageId, int authorId, int inResponseTo, Point senderLocation, Message message) {
-            this.messageId = messageId;
-            this.authorId = authorId;
-            this.inResponseTo = inResponseTo;
-            this.senderLocation = senderLocation;
-            this.message = message;
-        }
-
-        public void reset(int messageId, int authorId, int inResponseTo, Point senderLocation, Message message) {
-            this.messageId = messageId;
-            this.authorId = authorId;
-            this.inResponseTo = inResponseTo;
-            this.senderLocation = senderLocation;
-            this.message = message;
-        }
-
-        public String toString() {
-            StringBuilder builder = new StringBuilder();
-            builder.append("{");
-            builder.append("\"message-id\":");
-            builder.append(messageId);
-            builder.append(",");
-            builder.append("\"author-id\":");
-            builder.append(authorId);
-            builder.append(",");
-            builder.append("\"in-response-to\":");
-            builder.append(inResponseTo);
-            builder.append(",");
-            builder.append("\"sender-location\":");
-            builder.append(senderLocation);
-            builder.append(",");
-            builder.append("\"message\":");
-            builder.append("\"");
-            for (int i = 0; i < message.getLength(); i++) {
-                builder.append(message.charAt(i));
-            }
-            builder.append("\"");
-            builder.append("}");
-            return new String(builder);
-        }
-    }
-
-    public static class FacebookUser {
-
-        private int id;
-        private String alias;
-        private String name;
-        private String userSince;
-        private int[] friendIds;
-        private Employment employment;
-
-        public FacebookUser() {
-
-        }
-
-        public FacebookUser(int id, String alias, String name, String userSince, int[] friendIds, Employment employment) {
-            this.id = id;
-            this.alias = alias;
-            this.name = name;
-            this.userSince = userSince;
-            this.friendIds = friendIds;
-            this.employment = employment;
-        }
-
-        public int getId() {
-            return id;
-        }
-
-        public String getAlias() {
-            return alias;
-        }
-
-        public String getName() {
-            return name;
-        }
-
-        public String getUserSince() {
-            return userSince;
-        }
-
-        public int[] getFriendIds() {
-            return friendIds;
-        }
-
-        public Employment getEmployment() {
-            return employment;
-        }
-
-        public String toString() {
-            StringBuilder builder = new StringBuilder();
-            builder.append("{");
-            builder.append("\"id\":" + id);
-            builder.append(",");
-            builder.append("\"alias\":" + "\"" + alias + "\"");
-            builder.append(",");
-            builder.append("\"name\":" + "\"" + name + "\"");
-            builder.append(",");
-            builder.append("\"user-since\":" + userSince);
-            builder.append(",");
-            builder.append("\"friend-ids\":");
-            builder.append("{{");
-            for (int i = 0; i < friendIds.length; i++) {
-                builder.append(friendIds[i]);
-                builder.append(",");
-            }
-            if (friendIds.length > 0) {
-                builder.deleteCharAt(builder.lastIndexOf(","));
-            }
-            builder.append("}}");
-            builder.append(",");
-            builder.append("\"employment\":");
-            builder.append("[");
-            builder.append(employment.toString());
-            builder.append("]");
-            builder.append("}");
-            return builder.toString();
-        }
-
-        public void setId(int id) {
-            this.id = id;
-        }
-
-        public void setAlias(String alias) {
-            this.alias = alias;
-        }
-
-        public void setName(String name) {
-            this.name = name;
-        }
-
-        public void setUserSince(String userSince) {
-            this.userSince = userSince;
-        }
-
-        public void setFriendIds(int[] friendIds) {
-            this.friendIds = friendIds;
-        }
-
-        public void setEmployment(Employment employment) {
-            this.employment = employment;
-        }
-
-        public void reset(int id, String alias, String name, String userSince, int[] friendIds, Employment employment) {
-            this.id = id;
-            this.alias = alias;
-            this.name = name;
-            this.userSince = userSince;
-            this.friendIds = friendIds;
-            this.employment = employment;
-        }
-    }
-
     public static class TweetMessage {
 
         private long tweetid;
@@ -1283,7 +483,6 @@
         private Message messageText;
 
         public TweetMessage() {
-
         }
 
         public TweetMessage(long tweetid, TwitterUser user, Point senderLocation, DateTime sendTime,
@@ -1461,478 +660,6 @@
 
     }
 
-    public static class DistributionHandler {
-
-        private final ZipfGenerator zipfGen;
-        private final int totalUsers;
-        private final int totalMessages;
-        private Random random = new Random();
-
-        public DistributionHandler(int totalMessages, double skew, int totalNumUsers) {
-            zipfGen = new ZipfGenerator(totalMessages, skew);
-            totalUsers = totalNumUsers;
-            this.totalMessages = totalMessages;
-        }
-
-        public int getFromDistribution(int rank) {
-            double prob = zipfGen.getProbability(rank);
-            int numMessages = (int) (prob * totalMessages);
-            return numMessages;
-        }
-
-        public static void main(String args[]) {
-            int totalMessages = 1000 * 4070;
-            double skew = 0.5;
-            int totalUsers = 4070;
-            DistributionHandler d = new DistributionHandler(totalMessages, skew, totalUsers);
-            int sum = 0;
-            for (int i = totalUsers; i >= 1; i--) {
-                float contrib = d.getFromDistribution(i);
-                sum += contrib;
-                System.out.println(i + ":" + contrib);
-            }
-
-            System.out.println("SUM" + ":" + sum);
-
-        }
-    }
-
-    public static class ZipfGenerator {
-
-        private Random rnd = new Random(System.currentTimeMillis());
-        private int size;
-        private double skew;
-        private double bottom = 0;
-
-        public ZipfGenerator(int size, double skew) {
-            this.size = size;
-            this.skew = skew;
-            for (int i = 1; i < size; i++) {
-                this.bottom += (1 / Math.pow(i, this.skew));
-            }
-        }
-
-        // the next() method returns an rank id. The frequency of returned rank
-        // ids are follows Zipf distribution.
-        public int next() {
-            int rank;
-            double friquency = 0;
-            double dice;
-            rank = rnd.nextInt(size);
-            friquency = (1.0d / Math.pow(rank, this.skew)) / this.bottom;
-            dice = rnd.nextDouble();
-            while (!(dice < friquency)) {
-                rank = rnd.nextInt(size);
-                friquency = (1.0d / Math.pow(rank, this.skew)) / this.bottom;
-                dice = rnd.nextDouble();
-            }
-            return rank;
-        }
-
-        // This method returns a probability that the given rank occurs.
-        public double getProbability(int rank) {
-            return (1.0d / Math.pow(rank, this.skew)) / this.bottom;
-        }
-
-        public static void main(String[] args) throws IOException {
-            int total = (int) (3.7 * 1000 * 1000);
-            int skew = 2;
-            int numUsers = 1000 * 1000;
-            /*
-             * if (args.length != 2) { System.out.println("usage:" +
-             * "./zipf size skew"); System.exit(-1); }
-             */
-            BufferedWriter buf = new BufferedWriter(new FileWriter(new File("/tmp/zip_output")));
-            ZipfGenerator zipf = new ZipfGenerator(total, skew);
-            double sum = 0;
-            for (int i = 1; i <= numUsers; i++) {
-                double prob = zipf.getProbability(i);
-                double contribution = (double) (prob * total);
-                String contrib = i + ":" + contribution;
-                buf.write(contrib);
-                buf.write("\n");
-                System.out.println(contrib);
-                sum += contribution;
-            }
-            System.out.println("sum is :" + sum);
-        }
-    }
-
-    public static class PartitionElement implements ILibraryElement {
-        private final String name;
-        private final String host;
-        private final int fbUserKeyMin;
-        private final int fbUserKeyMax;
-        private final int twUserKeyMin;
-        private final int twUserKeyMax;
-        private final int fbMessageIdMin;
-        private final int fbMessageIdMax;
-        private final int twMessageIdMin;
-        private final int twMessageIdMax;
-
-        public PartitionElement(String partitionName, String host, int fbUserKeyMin, int fbUserKeyMax,
-                int twUserKeyMin, int twUserKeyMax, int fbMessageIdMin, int fbMessageIdMax, int twMessageIdMin,
-                int twMessageIdMax) {
-            this.name = partitionName;
-            this.host = host;
-            this.fbUserKeyMin = fbUserKeyMin;
-            this.fbUserKeyMax = fbUserKeyMax;
-            this.twUserKeyMin = twUserKeyMax;
-            this.twUserKeyMax = twUserKeyMax;
-            this.twMessageIdMin = twMessageIdMin;
-            this.twMessageIdMax = twMessageIdMax;
-            this.fbMessageIdMin = fbMessageIdMin;
-            this.fbMessageIdMax = fbMessageIdMax;
-        }
-
-        public String toString() {
-            StringBuilder builder = new StringBuilder();
-            builder.append(name);
-            builder.append(" ");
-            builder.append(host);
-            builder.append("\n");
-            builder.append("fbUser:key:min");
-            builder.append(fbUserKeyMin);
-
-            builder.append("\n");
-            builder.append("fbUser:key:max");
-            builder.append(fbUserKeyMax);
-
-            builder.append("\n");
-            builder.append("twUser:key:min");
-            builder.append(twUserKeyMin);
-
-            builder.append("\n");
-            builder.append("twUser:key:max");
-            builder.append(twUserKeyMax);
-
-            builder.append("\n");
-            builder.append("fbMessage:key:min");
-            builder.append(fbMessageIdMin);
-
-            builder.append("\n");
-            builder.append("fbMessage:key:max");
-            builder.append(fbMessageIdMax);
-
-            builder.append("\n");
-            builder.append("twMessage:key:min");
-            builder.append(twMessageIdMin);
-
-            builder.append("\n");
-            builder.append("twMessage:key:max");
-            builder.append(twMessageIdMax);
-
-            builder.append("\n");
-            builder.append("twMessage:key:max");
-            builder.append(twUserKeyMin);
-
-            return new String(builder);
-        }
-
-        @Override
-        public String getName() {
-            return "Partition";
-        }
-
-    }
-
-    interface ILibraryElement {
-
-        public enum ElementType {
-            PARTITION
-        }
-
-        public String getName();
-
-    }
-
-    public static class Configuration {
-
-        private final float numMB;
-        private final String unit;
-
-        private final List<SourcePartition> sourcePartitions;
-        private List<TargetPartition> targetPartitions;
-
-        public Configuration(float numMB, String unit, List<SourcePartition> partitions) throws IOException {
-            this.numMB = numMB;
-            this.unit = unit;
-            this.sourcePartitions = partitions;
-
-        }
-
-        public float getNumMB() {
-            return numMB;
-        }
-
-        public String getUnit() {
-            return unit;
-        }
-
-        public List<SourcePartition> getSourcePartitions() {
-            return sourcePartitions;
-        }
-
-        public List<TargetPartition> getTargetPartitions() {
-            return targetPartitions;
-        }
-
-        public void setTargetPartitions(List<TargetPartition> targetPartitions) {
-            this.targetPartitions = targetPartitions;
-        }
-
-    }
-
-    public static class XMLUtil {
-
-        public static void writeToXML(Configuration conf, String filePath) throws IOException,
-                ParserConfigurationException, TransformerException {
-
-            DocumentBuilderFactory docFactory = DocumentBuilderFactory.newInstance();
-            DocumentBuilder docBuilder = docFactory.newDocumentBuilder();
-
-            // root elements
-            Document doc = docBuilder.newDocument();
-            Element rootElement = doc.createElement("Partitions");
-            doc.appendChild(rootElement);
-
-            int index = 0;
-            for (TargetPartition partition : conf.getTargetPartitions()) {
-                writePartitionElement(conf.getSourcePartitions().get(index), partition, rootElement, doc);
-            }
-
-            TransformerFactory transformerFactory = TransformerFactory.newInstance();
-            Transformer transformer = transformerFactory.newTransformer();
-
-            transformer.setOutputProperty(OutputKeys.ENCODING, "utf-8");
-            transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "no");
-            transformer.setOutputProperty(OutputKeys.INDENT, "yes");
-            transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "2");
-
-            DOMSource source = new DOMSource(doc);
-            StreamResult result = new StreamResult(new File(filePath));
-
-            transformer.transform(source, result);
-
-        }
-
-        public static void writePartitionInfo(Configuration conf, String filePath) throws IOException {
-            BufferedWriter bw = new BufferedWriter(new FileWriter(filePath));
-            for (SourcePartition sp : conf.getSourcePartitions()) {
-                bw.write(sp.getHost() + ":" + sp.getName() + ":" + sp.getPath());
-                bw.write("\n");
-            }
-            bw.close();
-        }
-
-        public static Document getDocument(String filePath) throws Exception {
-            File inputFile = new File(filePath);
-            DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
-            DocumentBuilder db = dbf.newDocumentBuilder();
-            Document doc = db.parse(inputFile);
-            doc.getDocumentElement().normalize();
-            return doc;
-        }
-
-        public static Configuration getConfiguration(String filePath) throws Exception {
-            Configuration conf = getConfiguration(getDocument(filePath));
-            PartitionMetrics metrics = new PartitionMetrics(conf.getNumMB(), conf.getUnit(), conf.getSourcePartitions()
-                    .size());
-            List<TargetPartition> targetPartitions = getTargetPartitions(metrics, conf.getSourcePartitions());
-            conf.setTargetPartitions(targetPartitions);
-            return conf;
-        }
-
-        public static Configuration getConfiguration(Document document) throws IOException {
-            Element rootEle = document.getDocumentElement();
-            NodeList nodeList = rootEle.getChildNodes();
-            float size = Float.parseFloat(getStringValue((Element) nodeList, "size"));
-            String unit = getStringValue((Element) nodeList, "unit");
-            List<SourcePartition> sourcePartitions = getSourcePartitions(document);
-            return new Configuration(size, unit, sourcePartitions);
-        }
-
-        public static List<SourcePartition> getSourcePartitions(Document document) {
-            Element rootEle = document.getDocumentElement();
-            NodeList nodeList = rootEle.getElementsByTagName("partition");
-            List<SourcePartition> sourcePartitions = new ArrayList<SourcePartition>();
-            for (int i = 0; i < nodeList.getLength(); i++) {
-                Node node = nodeList.item(i);
-                sourcePartitions.add(getSourcePartition((Element) node));
-            }
-            return sourcePartitions;
-        }
-
-        public static SourcePartition getSourcePartition(Element functionElement) {
-            String name = getStringValue(functionElement, "name");
-            String host = getStringValue(functionElement, "host");
-            String path = getStringValue(functionElement, "path");
-            SourcePartition sp = new SourcePartition(name, host, path);
-            return sp;
-        }
-
-        public static String getStringValue(Element element, String tagName) {
-            String textValue = null;
-            NodeList nl = element.getElementsByTagName(tagName);
-            if (nl != null && nl.getLength() > 0) {
-                Element el = (Element) nl.item(0);
-                textValue = el.getFirstChild().getNodeValue();
-            }
-            return textValue;
-        }
-
-        public static PartitionConfiguration getPartitionConfiguration(String filePath, String partitionName)
-                throws Exception {
-            PartitionConfiguration pconf = getPartitionConfigurations(getDocument(filePath), partitionName);
-            return pconf;
-        }
-
-        public static PartitionConfiguration getPartitionConfigurations(Document document, String partitionName)
-                throws IOException {
-
-            Element rootEle = document.getDocumentElement();
-            NodeList nodeList = rootEle.getElementsByTagName("Partition");
-
-            for (int i = 0; i < nodeList.getLength(); i++) {
-                Node node = nodeList.item(i);
-                Element nodeElement = (Element) node;
-                String name = getStringValue(nodeElement, "name");
-                if (!name.equalsIgnoreCase(partitionName)) {
-                    continue;
-                }
-                String host = getStringValue(nodeElement, "host");
-                String path = getStringValue(nodeElement, "path");
-
-                String fbUserKeyMin = getStringValue(nodeElement, "fbUserKeyMin");
-                String fbUserKeyMax = getStringValue(nodeElement, "fbUserKeyMax");
-                String twUserKeyMin = getStringValue(nodeElement, "twUserKeyMin");
-                String twUserKeyMax = getStringValue(nodeElement, "twUserKeyMax");
-                String fbMessageKeyMin = getStringValue(nodeElement, "fbMessageKeyMin");
-
-                String fbMessageKeyMax = getStringValue(nodeElement, "fbMessageKeyMax");
-                String twMessageKeyMin = getStringValue(nodeElement, "twMessageKeyMin");
-                String twMessageKeyMax = getStringValue(nodeElement, "twMessageKeyMax");
-                String numCommonUsers = getStringValue(nodeElement, "numCommonUsers");
-
-                SourcePartition sp = new SourcePartition(name, host, path);
-
-                TargetPartition tp = new TargetPartition(partitionName, host, path, Integer.parseInt(fbUserKeyMin),
-                        Integer.parseInt(fbUserKeyMax), Integer.parseInt(twUserKeyMin), Integer.parseInt(twUserKeyMax),
-                        Integer.parseInt(fbMessageKeyMin), Integer.parseInt(fbMessageKeyMax),
-                        Integer.parseInt(twMessageKeyMin), Integer.parseInt(twMessageKeyMax),
-                        Integer.parseInt(numCommonUsers));
-                PartitionConfiguration pc = new PartitionConfiguration(sp, tp);
-                return pc;
-            }
-            return null;
-        }
-
-        public static List<TargetPartition> getTargetPartitions(PartitionMetrics metrics,
-                List<SourcePartition> sourcePartitions) {
-            List<TargetPartition> partitions = new ArrayList<TargetPartition>();
-            int fbUserKeyMin = 1;
-            int twUserKeyMin = 1;
-            int fbMessageIdMin = 1;
-            int twMessageIdMin = 1;
-
-            for (SourcePartition sp : sourcePartitions) {
-                int fbUserKeyMax = fbUserKeyMin + metrics.getFbOnlyUsers() + metrics.getCommonUsers() - 1;
-                int twUserKeyMax = twUserKeyMin + metrics.getTwitterOnlyUsers() + metrics.getCommonUsers() - 1;
-
-                int fbMessageIdMax = fbMessageIdMin + metrics.getFbMessages() - 1;
-                int twMessageIdMax = twMessageIdMin + metrics.getTwMessages() - 1;
-                TargetPartition pe = new TargetPartition(sp.getName(), sp.getHost(), sp.getPath(), fbUserKeyMin,
-                        fbUserKeyMax, twUserKeyMin, twUserKeyMax, fbMessageIdMin, fbMessageIdMax, twMessageIdMin,
-                        twMessageIdMax, metrics.getCommonUsers());
-                partitions.add(pe);
-
-                fbUserKeyMin = fbUserKeyMax + 1;
-                twUserKeyMin = twUserKeyMax + 1;
-
-                fbMessageIdMin = fbMessageIdMax + 1;
-                twMessageIdMin = twMessageIdMax + 1;
-            }
-
-            return partitions;
-        }
-
-        public static void writePartitionElement(SourcePartition sourcePartition, TargetPartition partition,
-                Element rootElement, Document doc) {
-            // staff elements
-            Element pe = doc.createElement("Partition");
-            rootElement.appendChild(pe);
-
-            // name element
-            Element name = doc.createElement("name");
-            name.appendChild(doc.createTextNode("" + partition.getName()));
-            pe.appendChild(name);
-
-            // host element
-            Element host = doc.createElement("host");
-            host.appendChild(doc.createTextNode("" + partition.getHost()));
-            pe.appendChild(host);
-
-            // path element
-            Element path = doc.createElement("path");
-            path.appendChild(doc.createTextNode("" + partition.getPath()));
-            pe.appendChild(path);
-
-            // fbUserKeyMin element
-            Element fbUserKeyMin = doc.createElement("fbUserKeyMin");
-            fbUserKeyMin.appendChild(doc.createTextNode("" + partition.getFbUserKeyMin()));
-            pe.appendChild(fbUserKeyMin);
-
-            // fbUserKeyMax element
-            Element fbUserKeyMax = doc.createElement("fbUserKeyMax");
-            fbUserKeyMax.appendChild(doc.createTextNode("" + partition.getFbUserKeyMax()));
-            pe.appendChild(fbUserKeyMax);
-
-            // twUserKeyMin element
-            Element twUserKeyMin = doc.createElement("twUserKeyMin");
-            twUserKeyMin.appendChild(doc.createTextNode("" + partition.getTwUserKeyMin()));
-            pe.appendChild(twUserKeyMin);
-
-            // twUserKeyMax element
-            Element twUserKeyMax = doc.createElement("twUserKeyMax");
-            twUserKeyMax.appendChild(doc.createTextNode("" + partition.getTwUserKeyMax()));
-            pe.appendChild(twUserKeyMax);
-
-            // fbMessgeKeyMin element
-            Element fbMessageKeyMin = doc.createElement("fbMessageKeyMin");
-            fbMessageKeyMin.appendChild(doc.createTextNode("" + partition.getFbMessageIdMin()));
-            pe.appendChild(fbMessageKeyMin);
-
-            // fbMessgeKeyMin element
-            Element fbMessageKeyMax = doc.createElement("fbMessageKeyMax");
-            fbMessageKeyMax.appendChild(doc.createTextNode("" + partition.getFbMessageIdMax()));
-            pe.appendChild(fbMessageKeyMax);
-
-            // twMessgeKeyMin element
-            Element twMessageKeyMin = doc.createElement("twMessageKeyMin");
-            twMessageKeyMin.appendChild(doc.createTextNode("" + partition.getTwMessageIdMin()));
-            pe.appendChild(twMessageKeyMin);
-
-            // twMessgeKeyMin element
-            Element twMessageKeyMax = doc.createElement("twMessageKeyMax");
-            twMessageKeyMax.appendChild(doc.createTextNode("" + partition.getTwMessageIdMax()));
-            pe.appendChild(twMessageKeyMax);
-
-            // twMessgeKeyMin element
-            Element numCommonUsers = doc.createElement("numCommonUsers");
-            numCommonUsers.appendChild(doc.createTextNode("" + partition.getCommonUsers()));
-            pe.appendChild(numCommonUsers);
-
-        }
-
-        public static void main(String args[]) throws Exception {
-            String confFile = "/Users/rgrove1/work/research/asterix/icde/data-gen/conf/conf.xml";
-            String outputPath = "/Users/rgrove1/work/research/asterix/icde/data-gen/output/conf-output.xml";
-            Configuration conf = getConfiguration(confFile);
-            writeToXML(conf, outputPath);
-        }
-
-    }
-
     public static class Date {
 
         private int day;
@@ -1992,57 +719,6 @@
         }
     }
 
-    public static class PartitionMetrics {
-
-        private final int fbMessages;
-        private final int twMessages;
-
-        private final int fbOnlyUsers;
-        private final int twitterOnlyUsers;
-        private final int commonUsers;
-
-        public PartitionMetrics(float number, String unit, int numPartitions) throws IOException {
-
-            int factor = 0;
-            if (unit.equalsIgnoreCase("MB")) {
-                factor = 1024 * 1024;
-            } else if (unit.equalsIgnoreCase("GB")) {
-                factor = 1024 * 1024 * 1024;
-            } else if (unit.equalsIgnoreCase("TB")) {
-                factor = 1024 * 1024 * 1024 * 1024;
-            } else
-                throw new IOException("Invalid unit:" + unit);
-
-            fbMessages = (int) (((number * factor * 0.80) / 258) / numPartitions);
-            twMessages = (int) (fbMessages * 1.1 / 0.35);
-
-            fbOnlyUsers = (int) ((number * factor * 0.20 * 0.0043)) / numPartitions;
-            twitterOnlyUsers = (int) (0.25 * fbOnlyUsers);
-            commonUsers = (int) (0.1 * fbOnlyUsers);
-        }
-
-        public int getFbMessages() {
-            return fbMessages;
-        }
-
-        public int getTwMessages() {
-            return twMessages;
-        }
-
-        public int getFbOnlyUsers() {
-            return fbOnlyUsers;
-        }
-
-        public int getTwitterOnlyUsers() {
-            return twitterOnlyUsers;
-        }
-
-        public int getCommonUsers() {
-            return commonUsers;
-        }
-
-    }
-
     public static String[] lastNames = { "Hoopengarner", "Harrow", "Gardner", "Blyant", "Best", "Buttermore", "Gronko",
             "Mayers", "Countryman", "Neely", "Ruhl", "Taggart", "Bash", "Cason", "Hil", "Zalack", "Mingle", "Carr",
             "Rohtin", "Wardle", "Pullman", "Wire", "Kellogg", "Hiles", "Keppel", "Bratton", "Sutton", "Wickes",
@@ -2482,4 +1158,5 @@
             "Lexicone", "Fax-fax", "Viatechi", "Inchdox", "Kongreen", "Doncare", "Y-geohex", "Opeelectronics",
             "Medflex", "Dancode", "Roundhex", "Labzatron", "Newhotplus", "Sancone", "Ronholdings", "Quoline",
             "zoomplus", "Fix-touch", "Codetechno", "Tanzumbam", "Indiex", "Canline" };
-}
+
+}
\ No newline at end of file
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/GenericSocketFeedAdapter.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/GenericSocketFeedAdapter.java
index 4cdda1e..20b9be1 100644
--- a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/GenericSocketFeedAdapter.java
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/GenericSocketFeedAdapter.java
@@ -4,9 +4,7 @@
 import java.io.InputStream;
 import java.net.ServerSocket;
 import java.net.Socket;
-import java.util.Map;
 import java.util.logging.Level;
-import java.util.logging.Logger;
 
 import edu.uci.ics.asterix.common.exceptions.AsterixException;
 import edu.uci.ics.asterix.external.dataset.adapter.StreamBasedAdapter;
@@ -20,23 +18,12 @@
 
     private static final long serialVersionUID = 1L;
 
-    public static final String KEY_PORT = "port";
-
-    private static final Logger LOGGER = Logger.getLogger(GenericSocketFeedAdapter.class.getName());
-
-    private Map<String, String> configuration;
-
     private SocketFeedServer socketFeedServer;
 
-    private static final int DEFAULT_PORT = 2909;
-
-    public GenericSocketFeedAdapter(Map<String, String> configuration, ITupleParserFactory parserFactory,
-            ARecordType outputtype, IHyracksTaskContext ctx) throws AsterixException, IOException {
+    public GenericSocketFeedAdapter(ITupleParserFactory parserFactory, ARecordType outputtype, int port,
+            IHyracksTaskContext ctx) throws AsterixException, IOException {
         super(parserFactory, outputtype, ctx);
-        this.configuration = configuration;
-        String portValue = (String) this.configuration.get(KEY_PORT);
-        int port = portValue != null ? Integer.parseInt(portValue) : DEFAULT_PORT;
-        this.socketFeedServer = new SocketFeedServer(configuration, outputtype, port);
+        this.socketFeedServer = new SocketFeedServer(outputtype, port);
     }
 
     @Override
@@ -53,8 +40,7 @@
         private ServerSocket serverSocket;
         private InputStream inputStream;
 
-        public SocketFeedServer(Map<String, String> configuration, ARecordType outputtype, int port)
-                throws IOException, AsterixException {
+        public SocketFeedServer(ARecordType outputtype, int port) throws IOException, AsterixException {
             try {
                 serverSocket = new ServerSocket(port);
             } catch (Exception e) {
@@ -70,19 +56,27 @@
         public InputStream getInputStream() {
             Socket socket;
             try {
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("waiting for client at " + serverSocket.getLocalPort());
+                }
                 socket = serverSocket.accept();
                 inputStream = socket.getInputStream();
             } catch (IOException e) {
                 if (LOGGER.isLoggable(Level.SEVERE)) {
                     LOGGER.severe("Unable to create input stream required for feed ingestion");
                 }
-                e.printStackTrace();
             }
             return inputStream;
         }
 
         public void stop() throws IOException {
-            serverSocket.close();
+            try {
+                serverSocket.close();
+            } catch (IOException ioe) {
+                if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning("Unable to close socket at " + serverSocket.getLocalPort());
+                }
+            }
         }
 
     }
@@ -92,4 +86,8 @@
         socketFeedServer.stop();
     }
 
+    public DataExchangeMode getDataExchangeMode() {
+        return DataExchangeMode.PUSH;
+    }
+
 }
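
The reworked GenericSocketFeedAdapter no longer reads its port from the configuration map; the factory hands the port in, and the adapter simply blocks in accept() and parses whatever a client pushes over that connection. A minimal sketch of such a client, assuming the feed listens on 127.0.0.1:10001 and consumes newline-delimited ADM records (host, port, and the sample record are placeholders, not part of this patch):

    import java.io.BufferedWriter;
    import java.io.OutputStreamWriter;
    import java.net.Socket;

    public class AdmSocketPusher {
        public static void main(String[] args) throws Exception {
            // Placeholder endpoint; use a host:port from the feed's "sockets" parameter.
            try (Socket socket = new Socket("127.0.0.1", 10001);
                    BufferedWriter out = new BufferedWriter(new OutputStreamWriter(socket.getOutputStream()))) {
                // One ADM record per line; replace with records that match the feed's datatype.
                out.write("{ \"id\": 1, \"text\": \"hello feed\" }");
                out.newLine();
                out.flush();
            }
        }
    }

Any record shape works as long as it matches the datatype declared for the feed's target dataset.
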
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/GenericSocketFeedAdapterFactory.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/GenericSocketFeedAdapterFactory.java
index 03c65c7..ce14bb2 100644
--- a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/GenericSocketFeedAdapterFactory.java
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/GenericSocketFeedAdapterFactory.java
@@ -1,5 +1,5 @@
 /*
-x * Copyright 2009-2012 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -14,34 +14,51 @@
  */
 package edu.uci.ics.asterix.tools.external.data;
 
+import java.util.ArrayList;
+import java.util.List;
 import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+
+import org.apache.commons.lang3.StringUtils;
 
 import edu.uci.ics.asterix.external.adapter.factory.StreamBasedAdapterFactory;
 import edu.uci.ics.asterix.metadata.feeds.IDatasourceAdapter;
 import edu.uci.ics.asterix.metadata.feeds.IGenericAdapterFactory;
 import edu.uci.ics.asterix.om.types.ARecordType;
-import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksCountPartitionConstraint;
+import edu.uci.ics.asterix.om.util.AsterixRuntimeUtil;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 
 /**
  * Factory class for creating @see{GenericSocketFeedAdapter} The
  * adapter listens at a port for receiving data (from external world).
- * Data received is transformed into Asterix Data Format (ADM) and stored into
- * a dataset a configured in the Adapter configuration.
+ * Data received is transformed into Asterix Data Format (ADM).
  */
 public class GenericSocketFeedAdapterFactory extends StreamBasedAdapterFactory implements IGenericAdapterFactory {
 
-    /**
-     * 
-     */
     private static final long serialVersionUID = 1L;
 
     private ARecordType outputType;
 
+    private List<Pair<String, Integer>> sockets;
+
+    private Mode mode = Mode.IP;
+
+    public static final String KEY_SOCKETS = "sockets";
+
+    public static final String KEY_MODE = "address-type";
+
+    public static enum Mode {
+        NC,
+        IP
+    }
+
     @Override
     public String getName() {
-        return "generic_socket_feed";
+        return "socket_adaptor";
     }
 
     @Override
@@ -54,21 +71,76 @@
         return SupportedOperation.READ;
     }
 
+    public List<Pair<String, Integer>> getSockets() {
+        return sockets;
+    }
+
     @Override
     public void configure(Map<String, String> configuration, ARecordType outputType) throws Exception {
         this.configuration = configuration;
         outputType = (ARecordType) outputType;
         this.configureFormat(outputType);
+        this.configureSockets(configuration);
     }
 
     @Override
     public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
-        return new AlgebricksCountPartitionConstraint(1);
+        List<String> locations = new ArrayList<String>();
+        for (Pair<String, Integer> socket : sockets) {
+            locations.add(socket.first);
+        }
+        return new AlgebricksAbsolutePartitionConstraint(locations.toArray(new String[] {}));
     }
 
     @Override
     public IDatasourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws Exception {
-        return new GenericSocketFeedAdapter(configuration, parserFactory, outputType, ctx);
+        Pair<String, Integer> socket = sockets.get(partition);
+        return new GenericSocketFeedAdapter(parserFactory, outputType, socket.second, ctx);
     }
 
+    private void configureSockets(Map<String, String> configuration) throws Exception {
+        sockets = new ArrayList<Pair<String, Integer>>();
+        String modeValue = configuration.get(KEY_MODE);
+        if (modeValue != null) {
+            mode = Mode.valueOf(modeValue.trim().toUpperCase());
+        }
+        String socketsValue = configuration.get(KEY_SOCKETS);
+        if (socketsValue == null) {
+            throw new IllegalArgumentException("\'sockets\' parameter not specified as part of adaptor configuration");
+        }
+        Map<String, Set<String>> ncMap = AsterixRuntimeUtil.getNodeControllerMap();
+        List<String> ncs = AsterixRuntimeUtil.getAllNodeControllers();
+        String[] socketsArray = socketsValue.split(",");
+        Random random = new Random();
+        for (String socket : socketsArray) {
+            String[] socketTokens = socket.split(":");
+            String host = socketTokens[0];
+            int port = Integer.parseInt(socketTokens[1]);
+            Pair<String, Integer> p = null;
+            switch (mode) {
+                case IP:
+                    Set<String> ncsOnIp = ncMap.get(host);
+                    if (ncsOnIp == null || ncsOnIp.isEmpty()) {
+                        throw new IllegalArgumentException("Invalid host " + host
+                                + " as it is not part of the AsterixDB cluster. Valid choices are "
+                                + StringUtils.join(ncMap.keySet(), ", "));
+                    }
+                    String[] ncArray = ncsOnIp.toArray(new String[] {});
+                    String nc = ncArray[random.nextInt(ncArray.length)];
+                    p = new Pair<String, Integer>(nc, port);
+                    break;
+
+                case NC:
+                    p = new Pair<String, Integer>(host, port);
+                    if (!ncs.contains(host)) {
+                        throw new IllegalArgumentException("Invalid NC " + host
+                                + " as it is not part of the AsterixDB cluster. Valid choices are "
+                                + StringUtils.join(ncs, ", "));
+
+                    }
+                    break;
+            }
+            sockets.add(p);
+        }
+    }
 }
\ No newline at end of file
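
configureSockets expects the "sockets" value to be a comma-separated list of host:port pairs, with "address-type" selecting whether the host part is an NC name or an IP address resolved against the cluster. A sketch of the map a caller would pass to configure(...), with invented NC names and ports (IP mode additionally needs a running cluster, since it consults AsterixRuntimeUtil.getNodeControllerMap()):

    import java.util.HashMap;
    import java.util.Map;

    public class SocketFeedConfigSketch {
        // Illustrative configuration; "nc1"/"nc2" and the ports are placeholders.
        public static Map<String, String> twoSocketConfig() {
            Map<String, String> conf = new HashMap<String, String>();
            conf.put(GenericSocketFeedAdapterFactory.KEY_SOCKETS, "nc1:10001,nc2:10002");
            conf.put(GenericSocketFeedAdapterFactory.KEY_MODE, "nc"); // or "ip"
            return conf;
        }
    }

getPartitionConstraint() then pins one adapter instance to each listed NC, and createAdapter(ctx, partition) binds that instance to the matching port.
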
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapter.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapter.java
index a92cb55..8f169f2 100644
--- a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapter.java
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapter.java
@@ -51,5 +51,10 @@
     public void stop() {
         ((RateControlledTupleParser) tupleParser).stop();
     }
+    
+    @Override
+    public DataExchangeMode getDataExchangeMode() {
+        return DataExchangeMode.PULL;
+    }
 
 }
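
Both new getDataExchangeMode() overrides (here and in GenericSocketFeedAdapter/SocketClientAdapter) refer to a DataExchangeMode type that is defined on the IFeedAdapter side and does not appear in this diff; from the call sites it presumably looks something like the following:

    // Hypothetical reconstruction; the real definition lives with IFeedAdapter, outside this patch.
    public enum DataExchangeMode {
        PUSH, // data arrives from an external source, e.g. the socket-based adapters
        PULL  // the adapter fetches data at its own pace, e.g. the rate-controlled file reader
    }
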
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapterFactory.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapterFactory.java
index fb2ac1bd..dc558d7 100644
--- a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapterFactory.java
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/RateControlledFileSystemBasedAdapterFactory.java
@@ -77,7 +77,7 @@
         if (configuration.get(KEY_FILE_SYSTEM) == null) {
             throw new Exception("File system type not specified. (fs=?) File system could be 'localfs' or 'hdfs'");
         }
-        if (configuration.get(KEY_SOURCE_DATATYPE) == null) {
+        if (configuration.get(IGenericAdapterFactory.KEY_TYPE_NAME) == null) {
             throw new Exception("Record type not specified (output-type-name=?)");
         }
         if (configuration.get(KEY_PATH) == null) {
@@ -270,4 +270,4 @@
             throw new HyracksDataException(ie);
         }
     }
-}
\ No newline at end of file
+}
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/SocketClientAdapter.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/SocketClientAdapter.java
new file mode 100644
index 0000000..4cbc4f1
--- /dev/null
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/SocketClientAdapter.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.tools.external.data;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.OutputStream;
+import java.net.Socket;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.asterix.metadata.feeds.IFeedAdapter;
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+
+public class SocketClientAdapter implements IFeedAdapter {
+
+    private static final long serialVersionUID = 1L;
+
+    private static final Logger LOGGER = Logger.getLogger(SocketClientAdapter.class.getName());
+
+    private static final String LOCALHOST = "127.0.0.1";
+
+    private final String localFile;
+
+    private final int port;
+
+    private final IHyracksTaskContext ctx;
+
+    private boolean continueStreaming = true;
+
+    public SocketClientAdapter(Integer port, String localFile, IHyracksTaskContext ctx) {
+        this.localFile = localFile;
+        this.port = port;
+        this.ctx = ctx;
+    }
+
+    @Override
+    public void start(int partition, IFrameWriter writer) throws Exception {
+        Socket socket = new Socket(LOCALHOST, port);
+        OutputStream os = socket.getOutputStream();
+        FileInputStream fin = new FileInputStream(new File(localFile));
+        byte[] chunk = new byte[1024];
+        int read;
+        try {
+            while (continueStreaming) {
+                read = fin.read(chunk);
+                if (read > 0) {
+                    os.write(chunk, 0, read);
+                } else {
+                    break;
+                }
+            }
+        } finally {
+            socket.close();
+            fin.close();
+        }
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Finished streaming file " + localFile + "to port [" + port + "]");
+        }
+    }
+
+    @Override
+    public DataExchangeMode getDataExchangeMode() {
+        return DataExchangeMode.PUSH;
+    }
+
+    @Override
+    public void stop() throws Exception {
+        continueStreaming = false;
+    }
+
+}
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/SocketClientAdapterFactory.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/SocketClientAdapterFactory.java
new file mode 100644
index 0000000..f21a740
--- /dev/null
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/SocketClientAdapterFactory.java
@@ -0,0 +1,108 @@
+/*
+ * Copyright 2009-2013 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.asterix.tools.external.data;
+
+import java.util.Map;
+
+import edu.uci.ics.asterix.common.exceptions.AsterixException;
+import edu.uci.ics.asterix.metadata.feeds.IDatasourceAdapter;
+import edu.uci.ics.asterix.metadata.feeds.ITypedAdapterFactory;
+import edu.uci.ics.asterix.om.types.ARecordType;
+import edu.uci.ics.asterix.om.types.AUnorderedListType;
+import edu.uci.ics.asterix.om.types.BuiltinType;
+import edu.uci.ics.asterix.om.types.IAType;
+import edu.uci.ics.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import edu.uci.ics.hyracks.algebricks.common.utils.Pair;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+
+public class SocketClientAdapterFactory implements ITypedAdapterFactory {
+
+    private static final long serialVersionUID = 1L;
+
+    private static final ARecordType outputType = initOutputType();
+
+    private GenericSocketFeedAdapterFactory genericSocketAdapterFactory;
+
+    private String[] fileSplits;
+
+    public static final String KEY_FILE_SPLITS = "file_splits";
+
+    @Override
+    public SupportedOperation getSupportedOperations() {
+        return SupportedOperation.READ;
+    }
+
+    private static ARecordType initOutputType() {
+        ARecordType outputType = null;
+        try {
+            String[] userFieldNames = new String[] { "screen-name", "lang", "friends_count", "statuses_count", "name",
+                    "followers_count" };
+
+            IAType[] userFieldTypes = new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING, BuiltinType.AINT32,
+                    BuiltinType.AINT32, BuiltinType.ASTRING, BuiltinType.AINT32 };
+            ARecordType userRecordType = new ARecordType("TwitterUserType", userFieldNames, userFieldTypes, false);
+
+            String[] fieldNames = new String[] { "tweetid", "user", "sender-location", "send-time", "referred-topics",
+                    "message-text" };
+
+            AUnorderedListType unorderedListType = new AUnorderedListType(BuiltinType.ASTRING, "referred-topics");
+            IAType[] fieldTypes = new IAType[] { BuiltinType.AINT64, userRecordType, BuiltinType.APOINT,
+                    BuiltinType.ADATETIME, unorderedListType, BuiltinType.ASTRING };
+            outputType = new ARecordType("TweetMessageType", fieldNames, fieldTypes, false);
+
+        } catch (AsterixException e) {
+            throw new IllegalStateException("Unable to initialize output type");
+        }
+        return outputType;
+    }
+
+    @Override
+    public String getName() {
+        return "socket_client";
+    }
+
+    @Override
+    public AdapterType getAdapterType() {
+        return AdapterType.TYPED;
+    }
+
+    @Override
+    public AlgebricksPartitionConstraint getPartitionConstraint() throws Exception {
+        return genericSocketAdapterFactory.getPartitionConstraint();
+    }
+
+    @Override
+    public IDatasourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws Exception {
+        Pair<String, Integer> socket = genericSocketAdapterFactory.getSockets().get(partition);
+        return new SocketClientAdapter(socket.second, fileSplits[partition], ctx);
+    }
+
+    @Override
+    public ARecordType getAdapterOutputType() {
+        return outputType;
+    }
+
+    @Override
+    public void configure(Map<String, String> configuration) throws Exception {
+        String fileSplitsValue = configuration.get(KEY_FILE_SPLITS);
+        if (fileSplitsValue == null) {
+            throw new IllegalArgumentException(
+                    "File splits not specified. File split is specified as a comma separated list of paths");
+        }
+        fileSplits = fileSplitsValue.trim().split(",");
+        genericSocketAdapterFactory = new GenericSocketFeedAdapterFactory();
+        genericSocketAdapterFactory.configure(configuration, outputType);
+    }
+}
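
SocketClientAdapterFactory reuses GenericSocketFeedAdapterFactory for the socket list and pairs the i-th entry of "file_splits" with the i-th socket, so the two comma-separated lists should line up one-to-one. A sketch of a two-partition configuration (paths, NC names, and ports are invented for illustration):

    import java.util.HashMap;
    import java.util.Map;

    public class SocketClientConfigSketch {
        public static Map<String, String> twoPartitionConfig() {
            Map<String, String> conf = new HashMap<String, String>();
            // One local ADM file per partition; streamed by SocketClientAdapter to the matching socket.
            conf.put(SocketClientAdapterFactory.KEY_FILE_SPLITS, "/data/tweets_p0.adm,/data/tweets_p1.adm");
            conf.put(GenericSocketFeedAdapterFactory.KEY_SOCKETS, "nc1:10001,nc2:10002");
            conf.put(GenericSocketFeedAdapterFactory.KEY_MODE, "nc");
            return conf;
        }
    }
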
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/TweetGenerator.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/TweetGenerator.java
index b92c3fd..8f252e6 100644
--- a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/TweetGenerator.java
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/TweetGenerator.java
@@ -1,11 +1,11 @@
 /*
- * Copyright 2009-2013 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
- * 
+ *
  *     http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -18,6 +18,8 @@
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
 import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
 
 import edu.uci.ics.asterix.tools.external.data.DataGenerator.InitializationInfo;
 import edu.uci.ics.asterix.tools.external.data.DataGenerator.TweetMessage;
@@ -25,50 +27,60 @@
 
 public class TweetGenerator {
 
+    private static Logger LOGGER = Logger.getLogger(TweetGenerator.class.getName());
+
     public static final String KEY_DURATION = "duration";
     public static final String KEY_TPS = "tps";
-    public static final String KEY_MIN_TPS = "tps-min";
-    public static final String KEY_MAX_TPS = "tps-max";
-    public static final String KEY_TPUT_DURATION = "tput-duration";
     public static final String KEY_GUID_SEED = "guid-seed";
 
     public static final String OUTPUT_FORMAT = "output-format";
     public static final String OUTPUT_FORMAT_ARECORD = "arecord";
     public static final String OUTPUT_FORMAT_ADM_STRING = "adm-string";
 
+    private static final int DEFAULT_DURATION = 60; //seconds
+    private static final int DEFAULT_GUID_SEED = 0;
+
     private int duration;
     private TweetMessageIterator tweetIterator = null;
+    private int partition;
+    private int tweetCount = 0;
     private int frameTweetCount = 0;
     private int numFlushedTweets = 0;
     private OutputStream os;
     private DataGenerator dataGenerator = null;
     private ByteBuffer outputBuffer = ByteBuffer.allocate(32 * 1024);
+    private GULongIDGenerator uidGenerator;
 
-    public TweetGenerator(Map<String, String> configuration, int partition, String format) throws Exception {
-        String value = configuration.get(KEY_DURATION);
-        duration = value != null ? Integer.parseInt(value) : 60;
-        InitializationInfo info = new InitializationInfo();
-        info.timeDurationInSecs = duration;
-        dataGenerator = new DataGenerator(info);
-
-        String seedValue = configuration.get(KEY_GUID_SEED);
-        int seedInt = seedValue != null ? Integer.parseInt(seedValue) : 0;
-        tweetIterator = dataGenerator.new TweetMessageIterator(duration, partition, (byte) seedInt);
+    public int getTweetCount() {
+        return tweetCount;
     }
 
-    private void writeTweetString(TweetMessage next) throws IOException {
-        String tweet = next.toString() + "\n";
+    public TweetGenerator(Map<String, String> configuration, int partition, String format, OutputStream os)
+            throws Exception {
+        this.partition = partition;
+        String value = configuration.get(KEY_DURATION);
+        this.duration = value != null ? Integer.parseInt(value) : DEFAULT_DURATION;
+        int guidSeed = configuration.get(KEY_GUID_SEED) != null ? Integer.parseInt(configuration.get(KEY_GUID_SEED))
+                : DEFAULT_GUID_SEED;
+        uidGenerator = new GULongIDGenerator(partition, (byte) (guidSeed));
+        dataGenerator = new DataGenerator(new InitializationInfo());
+        tweetIterator = dataGenerator.new TweetMessageIterator(duration, uidGenerator);
+        this.os = os;
+    }
+
+    private void writeTweetString(TweetMessage tweetMessage) throws IOException {
+        String tweet = tweetMessage.toString() + "\n";
+        tweetCount++;
         byte[] b = tweet.getBytes();
         if (outputBuffer.position() + b.length > outputBuffer.limit()) {
             flush();
             numFlushedTweets += frameTweetCount;
             frameTweetCount = 0;
             outputBuffer.put(b);
-            frameTweetCount++;
         } else {
             outputBuffer.put(b);
-            frameTweetCount++;
         }
+        frameTweetCount++;
     }
 
     public int getNumFlushedTweets() {
@@ -83,18 +95,22 @@
     }
 
     public boolean setNextRecordBatch(int numTweetsInBatch) throws Exception {
-        int count = 0;
-        if (tweetIterator.hasNext()) {
+        boolean moreData = tweetIterator.hasNext();
+        if (!moreData) {
+            if (outputBuffer.position() > 0) {
+                flush();
+            }
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Reached end of batch. Tweet Count: [" + partition + "]" + tweetCount);
+            }
+            return false;
+        } else {
+            int count = 0;
             while (count < numTweetsInBatch) {
                 writeTweetString(tweetIterator.next());
                 count++;
             }
             return true;
         }
-        return false;
     }
-
-    public void setOutputStream(OutputStream os) {
-        this.os = os;
-    }
-}
+}
\ No newline at end of file
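
TweetGenerator now takes its OutputStream at construction time and exposes getTweetCount(), which makes it easy to drive outside a feed. A small sketch, assuming the rest of DataGenerator is on the classpath and using arbitrary duration/seed values:

    import java.io.ByteArrayOutputStream;
    import java.util.HashMap;
    import java.util.Map;

    public class TweetGeneratorSketch {
        public static void main(String[] args) throws Exception {
            Map<String, String> conf = new HashMap<String, String>();
            conf.put(TweetGenerator.KEY_DURATION, "10");  // generate tweets for 10 seconds
            conf.put(TweetGenerator.KEY_GUID_SEED, "0");
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            TweetGenerator gen = new TweetGenerator(conf, 0, TweetGenerator.OUTPUT_FORMAT_ADM_STRING, out);
            // Keep requesting batches of 100 tweets until the iterator reports end of data.
            while (gen.setNextRecordBatch(100)) {
                // records accumulate in the 32KB buffer and are flushed to 'out' as it fills
            }
            System.out.println(gen.getTweetCount() + " tweets, " + out.size() + " bytes flushed");
        }
    }
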
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/TwitterFirehoseFeedAdapter.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/TwitterFirehoseFeedAdapter.java
index 07e018a..9e3c4dd 100644
--- a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/TwitterFirehoseFeedAdapter.java
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/TwitterFirehoseFeedAdapter.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2013 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -17,14 +17,9 @@
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
-import java.net.ServerSocket;
-import java.net.Socket;
-import java.net.UnknownHostException;
-import java.util.Date;
+import java.io.PipedInputStream;
+import java.io.PipedOutputStream;
 import java.util.Map;
-import java.util.Random;
-import java.util.Timer;
-import java.util.TimerTask;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.logging.Level;
@@ -39,6 +34,8 @@
 
 /**
  * TPS can be configured between 1 and 20,000
+ * 
+ * @author ramang
  */
 public class TwitterFirehoseFeedAdapter extends StreamBasedAdapter implements IFeedAdapter {
 
@@ -46,211 +43,117 @@
 
     private static final Logger LOGGER = Logger.getLogger(TwitterFirehoseFeedAdapter.class.getName());
 
-    private final TwitterServer twitterServer;
-
-    private TwitterClient twitterClient;
-
-    private static final String LOCALHOST = "127.0.0.1";
-    private static final int PORT = 2909;
-    private static final int TPUT_DURATION_DEFAULT = 5; // 5 seconds
-
     private ExecutorService executorService = Executors.newCachedThreadPool();
 
+    private PipedOutputStream outputStream = new PipedOutputStream();
+
+    private PipedInputStream inputStream = new PipedInputStream(outputStream);
+
+    private final TwitterServer twitterServer;
+
     public TwitterFirehoseFeedAdapter(Map<String, String> configuration, ITupleParserFactory parserFactory,
-            ARecordType outputtype, IHyracksTaskContext ctx, int partition) throws Exception {
+            ARecordType outputtype, int partition, IHyracksTaskContext ctx) throws Exception {
         super(parserFactory, outputtype, ctx);
-        this.twitterServer = new TwitterServer(configuration, outputtype, executorService, partition);
-        this.twitterClient = new TwitterClient(twitterServer.getPort());
+        this.twitterServer = new TwitterServer(configuration, partition, outputtype, outputStream, executorService);
     }
 
     @Override
     public void start(int partition, IFrameWriter writer) throws Exception {
         twitterServer.start();
-        twitterClient.start();
         super.start(partition, writer);
     }
 
     @Override
     public InputStream getInputStream(int partition) throws IOException {
-        return twitterClient.getInputStream();
+        return inputStream;
     }
 
-    private static class TwitterServer {
-        private ServerSocket serverSocket;
-        private final Listener listener;
-        private int port = -1;
-        private ExecutorService executorService;
+    public static class TwitterServer {
+        private final DataProvider dataProvider;
+        private final ExecutorService executorService;
 
-        public TwitterServer(Map<String, String> configuration, ARecordType outputtype,
-                ExecutorService executorService, int partition) throws Exception {
-            int numAttempts = 0;
-            while (port < 0) {
-                try {
-                    serverSocket = new ServerSocket(PORT + numAttempts);
-                    port = PORT + numAttempts;
-                } catch (Exception e) {
-                    if (LOGGER.isLoggable(Level.INFO)) {
-                        LOGGER.info("port: " + (PORT + numAttempts) + " unusable ");
-                    }
-                    numAttempts++;
-                }
-            }
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Twitter server configured to use port: " + port);
-            }
-            String dvds = configuration.get("dataverse-dataset");
-            listener = new Listener(serverSocket, configuration, outputtype, dvds, partition);
+        public TwitterServer(Map<String, String> configuration, int partition, ARecordType outputtype, OutputStream os,
+                ExecutorService executorService) throws Exception {
+            dataProvider = new DataProvider(configuration, outputtype, partition, os);
             this.executorService = executorService;
         }
 
-        public void start() {
-            executorService.execute(listener);
-        }
-
         public void stop() throws IOException {
-            listener.stop();
-            serverSocket.close();
+            dataProvider.stop();
         }
 
-        public int getPort() {
-            return port;
-        }
-    }
-
-    private static class TwitterClient {
-
-        private Socket socket;
-        private int port;
-
-        public TwitterClient(int port) throws UnknownHostException, IOException {
-            this.port = port;
-        }
-
-        public InputStream getInputStream() throws IOException {
-            return socket.getInputStream();
-        }
-
-        public void start() throws UnknownHostException, IOException {
-            socket = new Socket(LOCALHOST, port);
+        public void start() {
+            executorService.execute(dataProvider);
         }
 
     }
 
-    private static class Listener implements Runnable {
-
-        private final ServerSocket serverSocket;
-        private Socket socket;
-        private TweetGenerator tweetGenerator;
-        private boolean continuePush = true;
-        private int fixedTps = -1;
-        private int minTps = -1;
-        private int maxTps = -1;
-        private int tputDuration;
-        private Rate task;
-        private Mode mode;
+    public static class DataProvider implements Runnable {
 
         public static final String KEY_MODE = "mode";
 
+        private TweetGenerator tweetGenerator;
+        private boolean continuePush = true;
+        private int batchSize;
+        private final Mode mode;
+        private final OutputStream os;
+
         public static enum Mode {
             AGGRESSIVE,
-            CONTROLLED,
+            CONTROLLED
         }
 
-        public Listener(ServerSocket serverSocket, Map<String, String> configuration, ARecordType outputtype,
-                String datasetName, int partition) throws Exception {
-            this.serverSocket = serverSocket;
-            this.tweetGenerator = new TweetGenerator(configuration, partition, TweetGenerator.OUTPUT_FORMAT_ADM_STRING);
-            String value = configuration.get(KEY_MODE);
-            String confValue = null;
-            if (value != null) {
-                mode = Mode.valueOf(value.toUpperCase());
-                switch (mode) {
-                    case AGGRESSIVE:
-                        break;
-                    case CONTROLLED:
-                        confValue = configuration.get(TweetGenerator.KEY_TPS);
-                        if (confValue != null) {
-                            minTps = Integer.parseInt(confValue);
-                            maxTps = minTps;
-                            fixedTps = minTps;
-                        } else {
-                            confValue = configuration.get(TweetGenerator.KEY_MIN_TPS);
-                            if (confValue != null) {
-                                minTps = Integer.parseInt(confValue);
-                            }
-                            confValue = configuration.get(TweetGenerator.KEY_MAX_TPS);
-                            if (confValue != null) {
-                                maxTps = Integer.parseInt(configuration.get(TweetGenerator.KEY_MAX_TPS));
-                            }
-
-                            if (minTps < 0 || maxTps < 0 || minTps > maxTps) {
-                                throw new IllegalArgumentException("Incorrect value for min/max TPS");
-                            }
-                        }
-
-                }
-            } else {
-                mode = Mode.AGGRESSIVE;
+        public DataProvider(Map<String, String> configuration, ARecordType outputtype, int partition, OutputStream os)
+                throws Exception {
+            this.tweetGenerator = new TweetGenerator(configuration, partition, TweetGenerator.OUTPUT_FORMAT_ADM_STRING,
+                    os);
+            this.os = os;
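+            // The push mode is taken from the "mode" configuration key and defaults to AGGRESSIVE when absent.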
+            mode = configuration.get(KEY_MODE) != null ? Mode.valueOf(configuration.get(KEY_MODE).toUpperCase())
+                    : Mode.AGGRESSIVE;
+            switch (mode) {
+                case CONTROLLED:
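+                    // In CONTROLLED mode the batch size equals the configured tweets-per-second (tps) value.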
+                    String tpsValue = configuration.get(TweetGenerator.KEY_TPS);
+                    if (tpsValue == null) {
+                        throw new IllegalArgumentException("TPS value not configured. Use tps=<value>");
+                    }
+                    batchSize = Integer.parseInt(tpsValue);
+                    break;
+                case AGGRESSIVE:
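+                    // In AGGRESSIVE mode fixed-size batches of 5000 records are pushed as fast as possible.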
+                    batchSize = 5000;
+                    break;
             }
-
-            value = configuration.get(TweetGenerator.KEY_TPUT_DURATION);
-            tputDuration = value != null ? Integer.parseInt(value) : TPUT_DURATION_DEFAULT;
-            task = new Rate(tweetGenerator, tputDuration, datasetName, partition);
         }
 
         @Override
         public void run() {
-            while (true) {
-                try {
-                    socket = serverSocket.accept();
-                    OutputStream os = socket.getOutputStream();
-                    tweetGenerator.setOutputStream(os);
-                    boolean moreData = true;
-                    Timer timer = new Timer();
-                    timer.schedule(task, tputDuration * 1000, tputDuration * 1000);
-                    long startBatch;
-                    long endBatch;
-                    Random random = new Random();
-                    int batchSize = 0;
-                    while (moreData && continuePush) {
-                        switch (mode) {
-                            case CONTROLLED:
-                                if (maxTps > 0) {
-                                    batchSize = minTps + random.nextInt((maxTps + 1) - minTps);
-                                } else {
-                                    batchSize = fixedTps;
-                                }
-                                startBatch = System.currentTimeMillis();
-                                moreData = tweetGenerator.setNextRecordBatch(batchSize);
-                                endBatch = System.currentTimeMillis();
-                                if (endBatch - startBatch < 1000) {
-                                    Thread.sleep(1000 - (endBatch - startBatch));
-                                }
-                                break;
-                            case AGGRESSIVE:
-                                batchSize = Integer.MAX_VALUE;
-                                moreData = tweetGenerator.setNextRecordBatch(batchSize);
-                        }
-                    }
-                    timer.cancel();
-                    os.close();
-                    break;
-                } catch (Exception e) {
-                    if (LOGGER.isLoggable(Level.WARNING)) {
-                        LOGGER.warning("Exception in adaptor " + e.getMessage());
-                    }
-                } finally {
-                    try {
-                        if (socket != null && socket.isClosed()) {
-                            socket.close();
-                            if (LOGGER.isLoggable(Level.INFO)) {
-                                LOGGER.info("Closed socket:" + socket.getPort());
-                            }
-                        }
-                    } catch (IOException e) {
-                        e.printStackTrace();
-                    }
+            boolean moreData = true;
+            long startBatch;
+            long endBatch;
 
+            try {
+                while (moreData && continuePush) {
+                    switch (mode) {
+                        case AGGRESSIVE:
+                            moreData = tweetGenerator.setNextRecordBatch(batchSize);
+                            break;
+                        case CONTROLLED:
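+                            // Emit one batch per second: measure the time taken and sleep for the remainder,
+                            // warning when a full second was not enough to push the requested batch.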
+                            startBatch = System.currentTimeMillis();
+                            moreData = tweetGenerator.setNextRecordBatch(batchSize);
+                            endBatch = System.currentTimeMillis();
+                            if (endBatch - startBatch < 1000) {
+                                Thread.sleep(1000 - (endBatch - startBatch));
+                            } else {
+                                if (LOGGER.isLoggable(Level.WARNING)) {
+                                    LOGGER.warning("Unable to reach the required tps of " + batchSize);
+                                }
+                            }
+                            break;
+                    }
+                }
+                os.close();
+            } catch (Exception e) {
+                if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning("Exception in adapter " + e.getMessage());
                 }
             }
         }
@@ -259,37 +162,6 @@
             continuePush = false;
         }
 
-        private static class Rate extends TimerTask {
-
-            private final TweetGenerator gen;
-            private final int tputDuration;
-            private final int partition;
-            private final String dataset;
-            private int prevMeasuredTweets = 0;
-
-            public Rate(TweetGenerator gen, int tputDuration, String dataset, int partition) {
-                this.gen = gen;
-                this.tputDuration = tputDuration;
-                this.dataset = dataset;
-                this.partition = partition;
-                if (LOGGER.isLoggable(Level.WARNING)) {
-                    LOGGER.warning(new Date() + " " + "Dataset" + " " + "partition" + " " + "Total flushed tweets"
-                            + "\t" + "intantaneous throughput");
-                }
-            }
-
-            @Override
-            public void run() {
-                int currentMeasureTweets = gen.getNumFlushedTweets();
-                if (LOGGER.isLoggable(Level.FINE)) {
-                    LOGGER.fine(dataset + " " + partition + " " + gen.getNumFlushedTweets() + "\t"
-                            + ((currentMeasureTweets - prevMeasuredTweets) / tputDuration) + " ID "
-                            + Thread.currentThread().getId());
-                }
-                prevMeasuredTweets = currentMeasureTweets;
-            }
-
-        }
     }
 
     @Override
@@ -297,4 +169,9 @@
         twitterServer.stop();
     }
 
-}
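+    // This adapter pushes generated records into the feed pipeline rather than being polled for them.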
+    @Override
+    public DataExchangeMode getDataExchangeMode() {
+        return DataExchangeMode.PUSH;
+    }
+
+}
\ No newline at end of file
diff --git a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/TwitterFirehoseFeedAdapterFactory.java b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/TwitterFirehoseFeedAdapterFactory.java
index 2305c32..41b1915 100644
--- a/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/TwitterFirehoseFeedAdapterFactory.java
+++ b/asterix-tools/src/main/java/edu/uci/ics/asterix/tools/external/data/TwitterFirehoseFeedAdapterFactory.java
@@ -1,5 +1,5 @@
 /*
-x * Copyright 2009-2012 by The Regents of the University of California
+ * Copyright 2009-2013 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -41,13 +41,6 @@
     private static final long serialVersionUID = 1L;
 
     /*
-     * The dataverse and dataset names for the target feed dataset. This informaiton 
-     * is used in configuring partition constraints for the adapter. It is preferred that 
-     * the adapter location does not coincide with a partition location for the feed dataset.
-     */
-    private static final String KEY_DATAVERSE_DATASET = "dataverse-dataset";
-
-    /*
      * Degree of parallelism for feed ingestion activity. Defaults to 1.
      * This builds up the count constraint for the ingestion operator.
      */
@@ -106,7 +99,7 @@
 
     @Override
     public IDatasourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws Exception {
-        return new TwitterFirehoseFeedAdapter(configuration, parserFactory, outputType, ctx, partition);
+        return new TwitterFirehoseFeedAdapter(configuration, parserFactory, outputType, partition, ctx);
     }
 
     @Override
diff --git a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/ConcurrentLockManager.java b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/ConcurrentLockManager.java
index 4a20cba..cbfd119 100644
--- a/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/ConcurrentLockManager.java
+++ b/asterix-transactions/src/main/java/edu/uci/ics/asterix/transaction/management/service/locking/ConcurrentLockManager.java
@@ -442,17 +442,20 @@
             // we don't know the job, so there are no locks for it - we're done
             return;
         }
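+        // Read the last holder while synchronized on jobArenaMgr, but perform the unlock() calls
+        // outside the monitor so locks are not released while holding the arena manager's lock.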
+        long holder;
         synchronized (jobArenaMgr) {
-            long holder = jobArenaMgr.getLastHolder(jobSlot);
-            while (holder != -1) {
-                long resource = reqArenaMgr.getResourceId(holder);
-                int dsId = resArenaMgr.getDatasetId(resource);
-                int pkHashVal = resArenaMgr.getPkHashVal(resource);
-                unlock(new DatasetId(dsId), pkHashVal, LockMode.ANY, txnContext);
+            holder = jobArenaMgr.getLastHolder(jobSlot);
+        }
+        while (holder != -1) {
+            long resource = reqArenaMgr.getResourceId(holder);
+            int dsId = resArenaMgr.getDatasetId(resource);
+            int pkHashVal = resArenaMgr.getPkHashVal(resource);
+            unlock(new DatasetId(dsId), pkHashVal, LockMode.ANY, txnContext);
+            synchronized (jobArenaMgr) {
                 holder = jobArenaMgr.getLastHolder(jobSlot);
             }
-            jobArenaMgr.deallocate(jobSlot);
         }
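+        // Deallocate the job slot only after every lock it held has been released.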
+        jobArenaMgr.deallocate(jobSlot);
         //System.err.println(table.append(new StringBuilder(), true).toString());        
         //System.out.println("jobArenaMgr " + jobArenaMgr.addTo(new Stats()).toString());
         //System.out.println("resArenaMgr " + resArenaMgr.addTo(new Stats()).toString());