Merged fullstack_lsm_staging up to r3336

git-svn-id: https://hyracks.googlecode.com/svn/trunk/fullstack@3339 123451ca-8445-de46-9d55-352943316053
diff --git a/hyracks/hyracks-examples/btree-example/btreeapp/pom.xml b/hyracks/hyracks-examples/btree-example/btreeapp/pom.xml
deleted file mode 100644
index f392b34..0000000
--- a/hyracks/hyracks-examples/btree-example/btreeapp/pom.xml
+++ /dev/null
@@ -1,87 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks.examples.btree</groupId>
-  <artifactId>btreeapp</artifactId>
-  <name>btreeapp</name>
-
-  <parent>
-    <groupId>edu.uci.ics.hyracks.examples</groupId>
-    <artifactId>btree-example</artifactId>
-    <version>0.2.4-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <pluginManagement>
-      <plugins>
-        <plugin>
-          <groupId>org.eclipse.m2e</groupId>
-          <artifactId>lifecycle-mapping</artifactId>
-          <version>1.0.0</version>
-          <configuration>
-            <lifecycleMappingMetadata>
-              <pluginExecutions>
-                <pluginExecution>
-                  <pluginExecutionFilter>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-dependency-plugin</artifactId>
-                    <versionRange>[1.0.0,)</versionRange>
-                    <goals>
-                      <goal>copy-dependencies</goal>
-                    </goals>
-                  </pluginExecutionFilter>
-                  <action>
-                    <ignore />
-                  </action>
-                </pluginExecution>
-              </pluginExecutions>
-            </lifecycleMappingMetadata>
-          </configuration>
-        </plugin>
-      </plugins>
-	</pluginManagement>
-  
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-dependency-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>copy-dependencies</id>
-            <phase>package</phase>
-            <goals>
-              <goal>copy-dependencies</goal>
-            </goals>
-            <configuration>
-              <outputDirectory>target/application/lib</outputDirectory>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <artifactId>maven-assembly-plugin</artifactId>
-        <version>2.2-beta-5</version>
-        <executions>
-          <execution>
-            <configuration>
-              <descriptors>
-                <descriptor>src/main/assembly/app-assembly.xml</descriptor>
-              </descriptors>
-            </configuration>
-            <phase>package</phase>
-            <goals>
-              <goal>attached</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks.examples.btree</groupId>
-  		<artifactId>btreehelper</artifactId>
-  		<version>0.2.4-SNAPSHOT</version>
-  		<scope>compile</scope>
-  	</dependency>
-  </dependencies>
-</project>
diff --git a/hyracks/hyracks-examples/btree-example/btreeapp/src/main/assembly/app-assembly.xml b/hyracks/hyracks-examples/btree-example/btreeapp/src/main/assembly/app-assembly.xml
deleted file mode 100644
index 43ace6c..0000000
--- a/hyracks/hyracks-examples/btree-example/btreeapp/src/main/assembly/app-assembly.xml
+++ /dev/null
@@ -1,13 +0,0 @@
-<assembly>
-  <id>app-assembly</id>
-  <formats>
-    <format>zip</format>
-  </formats>
-  <includeBaseDirectory>false</includeBaseDirectory>
-  <fileSets>
-    <fileSet>
-      <directory>target/application/lib</directory>
-      <outputDirectory>lib</outputDirectory>
-    </fileSet>
-  </fileSets>
-</assembly>
diff --git a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/InsertPipelineExample.java b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/InsertPipelineExample.java
index b6e8c72..a939a04 100644
--- a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/InsertPipelineExample.java
+++ b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/InsertPipelineExample.java
@@ -41,15 +41,14 @@
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
 import edu.uci.ics.hyracks.dataflow.std.misc.NullSinkOperatorDescriptor;
 import edu.uci.ics.hyracks.examples.btree.helper.DataGenOperatorDescriptor;
-import edu.uci.ics.hyracks.examples.btree.helper.IndexRegistryProvider;
+import edu.uci.ics.hyracks.examples.btree.helper.IndexLifecycleManagerProvider;
 import edu.uci.ics.hyracks.examples.btree.helper.StorageManagerInterface;
 import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexInsertUpdateDeleteOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
 
 // This example will insert tuples into the primary and secondary index using an insert pipeline
@@ -62,9 +61,6 @@
         @Option(name = "-port", usage = "Hyracks Cluster Controller Port (default: 1098)")
         public int port = 1098;
 
-        @Option(name = "-app", usage = "Hyracks Application name", required = true)
-        public String app;
-
         @Option(name = "-target-ncs", usage = "Comma separated list of node-controller names to use", required = true)
         public String ncs;
 
@@ -88,7 +84,7 @@
         JobSpecification job = createJob(options);
 
         long start = System.currentTimeMillis();
-        JobId jobId = hcc.startJob(options.app, job);
+        JobId jobId = hcc.startJob(job);
         hcc.waitForCompletion(jobId);
         long end = System.currentTimeMillis();
         System.err.println(start + " " + end + " " + (end - start));
@@ -124,7 +120,7 @@
         // run data generator on first nodecontroller given
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, dataGen, splitNCs[0]);
 
-        IIndexRegistryProvider<IIndex> indexRegistryProvider = IndexRegistryProvider.INSTANCE;
+        IIndexLifecycleManagerProvider lcManagerProvider = IndexLifecycleManagerProvider.INSTANCE;
         IStorageManagerInterface storageManager = StorageManagerInterface.INSTANCE;
 
         // prepare insertion into primary index
@@ -151,9 +147,9 @@
 
         // create operator descriptor
         TreeIndexInsertUpdateDeleteOperatorDescriptor primaryInsert = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
-                spec, recDesc, storageManager, indexRegistryProvider, primarySplitProvider, primaryTypeTraits,
-                primaryComparatorFactories, primaryFieldPermutation, IndexOp.INSERT, dataflowHelperFactory, null,
-                NoOpOperationCallbackProvider.INSTANCE);
+                spec, recDesc, storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits,
+                primaryComparatorFactories, null, primaryFieldPermutation, IndexOperation.INSERT,
+                dataflowHelperFactory, null, NoOpOperationCallbackFactory.INSTANCE);
         JobHelper.createPartitionConstraint(spec, primaryInsert, splitNCs);
 
         // prepare insertion into secondary index
@@ -175,9 +171,9 @@
                 options.secondaryBTreeName);
         // create operator descriptor
         TreeIndexInsertUpdateDeleteOperatorDescriptor secondaryInsert = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
-                spec, recDesc, storageManager, indexRegistryProvider, secondarySplitProvider, secondaryTypeTraits,
-                secondaryComparatorFactories, secondaryFieldPermutation, IndexOp.INSERT, dataflowHelperFactory, null,
-                NoOpOperationCallbackProvider.INSTANCE);
+                spec, recDesc, storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
+                secondaryComparatorFactories, null, secondaryFieldPermutation, IndexOperation.INSERT,
+                dataflowHelperFactory, null, NoOpOperationCallbackFactory.INSTANCE);
         JobHelper.createPartitionConstraint(spec, secondaryInsert, splitNCs);
 
         // end the insert pipeline at this sink operator
diff --git a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
index a6c7ea6..616fdae 100644
--- a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
+++ b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
@@ -40,14 +40,13 @@
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
 import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
 import edu.uci.ics.hyracks.examples.btree.helper.DataGenOperatorDescriptor;
-import edu.uci.ics.hyracks.examples.btree.helper.IndexRegistryProvider;
+import edu.uci.ics.hyracks.examples.btree.helper.IndexLifecycleManagerProvider;
 import edu.uci.ics.hyracks.examples.btree.helper.StorageManagerInterface;
 import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
 
 // This example will load a primary index from randomly generated data
@@ -60,9 +59,6 @@
         @Option(name = "-port", usage = "Hyracks Cluster Controller Port (default: 1098)")
         public int port = 1098;
 
-        @Option(name = "-app", usage = "Hyracks Application name", required = true)
-        public String app;
-
         @Option(name = "-target-ncs", usage = "Comma separated list of node-controller names to use", required = true)
         public String ncs;
 
@@ -86,7 +82,7 @@
         JobSpecification job = createJob(options);
 
         long start = System.currentTimeMillis();
-        JobId jobId = hcc.startJob(options.app, job);
+        JobId jobId = hcc.startJob(job);
         hcc.waitForCompletion(jobId);
         long end = System.currentTimeMillis();
         System.err.println(start + " " + end + " " + (end - start));
@@ -141,7 +137,7 @@
         typeTraits[3] = UTF8StringPointable.TYPE_TRAITS;
 
         // create providers for B-Tree
-        IIndexRegistryProvider<IIndex> indexRegistryProvider = IndexRegistryProvider.INSTANCE;
+        IIndexLifecycleManagerProvider lcManagerProvider = IndexLifecycleManagerProvider.INSTANCE;
         IStorageManagerInterface storageManager = StorageManagerInterface.INSTANCE;
 
         // the B-Tree expects its keyfields to be at the front of its input
@@ -152,8 +148,8 @@
         IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.btreeName);
         IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory();
         TreeIndexBulkLoadOperatorDescriptor btreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, indexRegistryProvider, btreeSplitProvider, typeTraits, comparatorFactories,
-                fieldPermutation, 0.7f, dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+                storageManager, lcManagerProvider, btreeSplitProvider, typeTraits, comparatorFactories, null,
+                fieldPermutation, 0.7f, false, 1000L, dataflowHelperFactory, NoOpOperationCallbackFactory.INSTANCE);
         JobHelper.createPartitionConstraint(spec, btreeBulkLoad, splitNCs);
 
         // distribute the records from the datagen via hashing to the bulk load
diff --git a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexSearchExample.java b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
index d24ba33..3493947 100644
--- a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
+++ b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
@@ -38,14 +38,13 @@
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
 import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.misc.PrinterOperatorDescriptor;
-import edu.uci.ics.hyracks.examples.btree.helper.IndexRegistryProvider;
+import edu.uci.ics.hyracks.examples.btree.helper.IndexLifecycleManagerProvider;
 import edu.uci.ics.hyracks.examples.btree.helper.StorageManagerInterface;
 import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
 import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
 
 // This example will perform an ordered scan on the primary index
@@ -59,9 +58,6 @@
         @Option(name = "-port", usage = "Hyracks Cluster Controller Port (default: 1098)")
         public int port = 1098;
 
-        @Option(name = "-app", usage = "Hyracks Application name", required = true)
-        public String app;
-
         @Option(name = "-target-ncs", usage = "Comma separated list of node-controller names to use", required = true)
         public String ncs;
 
@@ -79,7 +75,7 @@
         JobSpecification job = createJob(options);
 
         long start = System.currentTimeMillis();
-        JobId jobId = hcc.startJob(options.app, job);
+        JobId jobId = hcc.startJob(job);
         hcc.waitForCompletion(jobId);
         long end = System.currentTimeMillis();
         System.err.println(start + " " + end + " " + (end - start));
@@ -103,7 +99,7 @@
         comparatorFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
 
         // create providers for B-Tree
-        IIndexRegistryProvider<IIndex> indexRegistryProvider = IndexRegistryProvider.INSTANCE;
+        IIndexLifecycleManagerProvider lcManagerProvider = IndexLifecycleManagerProvider.INSTANCE;
         IStorageManagerInterface storageManager = StorageManagerInterface.INSTANCE;
 
         // schema of tuples coming out of primary index
@@ -142,8 +138,8 @@
         IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.btreeName);
         IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory();
         BTreeSearchOperatorDescriptor btreeSearchOp = new BTreeSearchOperatorDescriptor(spec, recDesc, storageManager,
-                indexRegistryProvider, btreeSplitProvider, typeTraits, comparatorFactories, lowKeyFields,
-                highKeyFields, true, true, dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
+                lcManagerProvider, btreeSplitProvider, typeTraits, comparatorFactories, null, lowKeyFields,
+                highKeyFields, true, true, dataflowHelperFactory, false, NoOpOperationCallbackFactory.INSTANCE);
         JobHelper.createPartitionConstraint(spec, btreeSearchOp, splitNCs);
 
         // have each node print the results of its respective B-Tree
diff --git a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
index 5aa338a..075ea20 100644
--- a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
+++ b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
@@ -33,15 +33,14 @@
 import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
 import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
-import edu.uci.ics.hyracks.examples.btree.helper.IndexRegistryProvider;
+import edu.uci.ics.hyracks.examples.btree.helper.IndexLifecycleManagerProvider;
 import edu.uci.ics.hyracks.examples.btree.helper.StorageManagerInterface;
 import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexDiskOrderScanOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
 
 // This example will load a secondary index with <key, primary-index key> pairs
@@ -55,9 +54,6 @@
         @Option(name = "-port", usage = "Hyracks Cluster Controller Port (default: 1098)")
         public int port = 1098;
 
-        @Option(name = "-app", usage = "Hyracks Application name", required = true)
-        public String app;
-
         @Option(name = "-target-ncs", usage = "Comma separated list of node-controller names to use", required = true)
         public String ncs;
 
@@ -81,7 +77,7 @@
         JobSpecification job = createJob(options);
 
         long start = System.currentTimeMillis();
-        JobId jobId = hcc.startJob(options.app, job);
+        JobId jobId = hcc.startJob(job);
         hcc.waitForCompletion(jobId);
         long end = System.currentTimeMillis();
         System.err.println(start + " " + end + " " + (end - start));
@@ -93,7 +89,7 @@
 
         String[] splitNCs = options.ncs.split(",");
 
-        IIndexRegistryProvider<IIndex> indexRegistryProvider = IndexRegistryProvider.INSTANCE;
+        IIndexLifecycleManagerProvider lcManagerProvider = IndexLifecycleManagerProvider.INSTANCE;
         IStorageManagerInterface storageManager = StorageManagerInterface.INSTANCE;
 
         // schema of tuples that we are retrieving from the primary index
@@ -122,8 +118,8 @@
         IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
         IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory();
         TreeIndexDiskOrderScanOperatorDescriptor btreeScanOp = new TreeIndexDiskOrderScanOperatorDescriptor(spec,
-                recDesc, storageManager, indexRegistryProvider, primarySplitProvider, primaryTypeTraits,
-                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+                recDesc, storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits,
+                dataflowHelperFactory, NoOpOperationCallbackFactory.INSTANCE);
         JobHelper.createPartitionConstraint(spec, btreeScanOp, splitNCs);
 
         // sort the tuples as preparation for bulk load into secondary index
@@ -144,8 +140,8 @@
         int[] fieldPermutation = { 1, 0 };
         IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.secondaryBTreeName);
         TreeIndexBulkLoadOperatorDescriptor btreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, indexRegistryProvider, btreeSplitProvider, secondaryTypeTraits, comparatorFactories,
-                fieldPermutation, 0.7f, dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+                storageManager, lcManagerProvider, btreeSplitProvider, secondaryTypeTraits, comparatorFactories, null,
+                fieldPermutation, 0.7f, false, 1000L, dataflowHelperFactory, NoOpOperationCallbackFactory.INSTANCE);
         JobHelper.createPartitionConstraint(spec, btreeBulkLoad, splitNCs);
 
         // connect the ops
diff --git a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexSearchExample.java b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
index 277668b..a6ffc13 100644
--- a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
+++ b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
@@ -38,14 +38,13 @@
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
 import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.misc.PrinterOperatorDescriptor;
-import edu.uci.ics.hyracks.examples.btree.helper.IndexRegistryProvider;
+import edu.uci.ics.hyracks.examples.btree.helper.IndexLifecycleManagerProvider;
 import edu.uci.ics.hyracks.examples.btree.helper.StorageManagerInterface;
 import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
 import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
 
 // This example will perform range search on the secondary index
@@ -59,9 +58,6 @@
         @Option(name = "-port", usage = "Hyracks Cluster Controller Port (default: 1098)")
         public int port = 1098;
 
-        @Option(name = "-app", usage = "Hyracks Application name", required = true)
-        public String app;
-
         @Option(name = "-target-ncs", usage = "Comma separated list of node-controller names to use", required = true)
         public String ncs;
 
@@ -82,7 +78,7 @@
         JobSpecification job = createJob(options);
 
         long start = System.currentTimeMillis();
-        JobId jobId = hcc.startJob(options.app, job);
+        JobId jobId = hcc.startJob(job);
         hcc.waitForCompletion(jobId);
         long end = System.currentTimeMillis();
         System.err.println(start + " " + end + " " + (end - start));
@@ -94,7 +90,7 @@
 
         String[] splitNCs = options.ncs.split(",");
 
-        IIndexRegistryProvider<IIndex> indexRegistryProvider = IndexRegistryProvider.INSTANCE;
+        IIndexLifecycleManagerProvider lcManagerProvider = IndexLifecycleManagerProvider.INSTANCE;
         IStorageManagerInterface storageManager = StorageManagerInterface.INSTANCE;
 
         // schema of tuples coming out of secondary index
@@ -168,9 +164,9 @@
                 options.secondaryBTreeName);
         IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory();
         BTreeSearchOperatorDescriptor secondarySearchOp = new BTreeSearchOperatorDescriptor(spec, secondaryRecDesc,
-                storageManager, indexRegistryProvider, secondarySplitProvider, secondaryTypeTraits,
-                searchComparatorFactories, secondaryLowKeyFields, secondaryHighKeyFields, true, true,
-                dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
+                storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
+                searchComparatorFactories, null, secondaryLowKeyFields, secondaryHighKeyFields, true, true,
+                dataflowHelperFactory, false, NoOpOperationCallbackFactory.INSTANCE);
         JobHelper.createPartitionConstraint(spec, secondarySearchOp, splitNCs);
 
         // secondary index will output tuples with [UTF8String, Integer]
@@ -184,9 +180,9 @@
 
         IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
         BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                storageManager, indexRegistryProvider, primarySplitProvider, primaryTypeTraits,
-                primaryComparatorFactories, primaryLowKeyFields, primaryHighKeyFields, true, true,
-                dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
+                storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
+                null, primaryLowKeyFields, primaryHighKeyFields, true, true, dataflowHelperFactory, false,
+                NoOpOperationCallbackFactory.INSTANCE);
         JobHelper.createPartitionConstraint(spec, primarySearchOp, splitNCs);
 
         // have each node print the results of its respective B-Tree
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/IndexLifecycleManagerProvider.java b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/IndexLifecycleManagerProvider.java
new file mode 100644
index 0000000..6a17755
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/IndexLifecycleManagerProvider.java
@@ -0,0 +1,15 @@
+package edu.uci.ics.hyracks.examples.btree.helper;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManager;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
+
+public enum IndexLifecycleManagerProvider implements IIndexLifecycleManagerProvider {
+    INSTANCE;
+
+    @Override
+    public IIndexLifecycleManager getLifecycleManager(IHyracksTaskContext ctx) {
+        return RuntimeContext.get(ctx).getIndexLifecycleManager();
+    }
+
+}
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/IndexRegistryProvider.java b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/IndexRegistryProvider.java
deleted file mode 100644
index d1f40d8..0000000
--- a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/IndexRegistryProvider.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.examples.btree.helper;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexRegistry;
-
-public class IndexRegistryProvider implements IIndexRegistryProvider<IIndex> {
-    private static final long serialVersionUID = 1L;
-
-    public static final IndexRegistryProvider INSTANCE = new IndexRegistryProvider();
-
-    private IndexRegistryProvider() {
-    }
-
-    @Override
-    public IndexRegistry<IIndex> getRegistry(IHyracksTaskContext ctx) {
-        return RuntimeContext.get(ctx).getIndexRegistry();
-    }
-}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/NCApplicationEntryPoint.java b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/NCApplicationEntryPoint.java
new file mode 100644
index 0000000..a1dacd8
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/NCApplicationEntryPoint.java
@@ -0,0 +1,22 @@
+package edu.uci.ics.hyracks.examples.btree.helper;
+
+import edu.uci.ics.hyracks.api.application.INCApplicationContext;
+import edu.uci.ics.hyracks.api.application.INCApplicationEntryPoint;
+
+public class NCApplicationEntryPoint implements INCApplicationEntryPoint {
+    @Override
+    public void start(INCApplicationContext ncAppCtx, String[] args) throws Exception {
+        RuntimeContext rCtx = new RuntimeContext(ncAppCtx);
+        ncAppCtx.setApplicationObject(rCtx);
+    }
+
+    @Override
+    public void notifyStartupComplete() throws Exception {
+
+    }
+
+    @Override
+    public void stop() throws Exception {
+
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/NCBootstrap.java b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/NCBootstrap.java
deleted file mode 100644
index ea55e7a..0000000
--- a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/NCBootstrap.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.examples.btree.helper;
-
-import java.util.logging.Logger;
-
-import edu.uci.ics.hyracks.api.application.INCApplicationContext;
-import edu.uci.ics.hyracks.api.application.INCBootstrap;
-
-public class NCBootstrap implements INCBootstrap {
-    private static final Logger LOGGER = Logger.getLogger(NCBootstrap.class.getName());
-
-    private INCApplicationContext appCtx;
-
-    @Override
-    public void start() throws Exception {
-        LOGGER.info("Starting NC Bootstrap");
-        RuntimeContext rCtx = new RuntimeContext(appCtx);
-        appCtx.setApplicationObject(rCtx);
-        LOGGER.info("Initialized RuntimeContext: " + rCtx);
-    }
-
-    @Override
-    public void stop() throws Exception {
-        LOGGER.info("Stopping Asterix NC Bootstrap");
-        RuntimeContext rCtx = (RuntimeContext) appCtx.getApplicationObject();
-        rCtx.close();
-    }
-
-    @Override
-    public void setApplicationContext(INCApplicationContext appCtx) {
-        this.appCtx = appCtx;
-    }
-}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/RuntimeContext.java b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/RuntimeContext.java
index 56d517f..aa5ce1d 100644
--- a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/RuntimeContext.java
+++ b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/RuntimeContext.java
@@ -17,8 +17,9 @@
 
 import edu.uci.ics.hyracks.api.application.INCApplicationContext;
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexRegistry;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManager;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexLifecycleManager;
 import edu.uci.ics.hyracks.storage.common.buffercache.BufferCache;
 import edu.uci.ics.hyracks.storage.common.buffercache.ClockPageReplacementStrategy;
 import edu.uci.ics.hyracks.storage.common.buffercache.DelayPageCleanerPolicy;
@@ -28,20 +29,30 @@
 import edu.uci.ics.hyracks.storage.common.buffercache.IPageReplacementStrategy;
 import edu.uci.ics.hyracks.storage.common.file.IFileMapManager;
 import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-import edu.uci.ics.hyracks.storage.common.smi.TransientFileMapManager;
+import edu.uci.ics.hyracks.storage.common.file.ILocalResourceRepository;
+import edu.uci.ics.hyracks.storage.common.file.ILocalResourceRepositoryFactory;
+import edu.uci.ics.hyracks.storage.common.file.ResourceIdFactory;
+import edu.uci.ics.hyracks.storage.common.file.ResourceIdFactoryProvider;
+import edu.uci.ics.hyracks.storage.common.file.TransientFileMapManager;
+import edu.uci.ics.hyracks.storage.common.file.TransientLocalResourceRepositoryFactory;
 
 public class RuntimeContext {
-    private IndexRegistry<IIndex> indexRegistry;
     private IBufferCache bufferCache;
     private IFileMapManager fileMapManager;
+    private ILocalResourceRepository localResourceRepository;
+    private IIndexLifecycleManager lcManager;
+    private ResourceIdFactory resourceIdFactory;
 
-    public RuntimeContext(INCApplicationContext appCtx) {
+    public RuntimeContext(INCApplicationContext appCtx) throws HyracksDataException {
         fileMapManager = new TransientFileMapManager();
         ICacheMemoryAllocator allocator = new HeapBufferAllocator();
         IPageReplacementStrategy prs = new ClockPageReplacementStrategy();
         bufferCache = new BufferCache(appCtx.getRootContext().getIOManager(), allocator, prs,
                 new DelayPageCleanerPolicy(1000), fileMapManager, 32768, 50, 100);
-        indexRegistry = new IndexRegistry<IIndex>();
+        lcManager = new IndexLifecycleManager();
+        ILocalResourceRepositoryFactory localResourceRepositoryFactory = new TransientLocalResourceRepositoryFactory();
+        localResourceRepository = localResourceRepositoryFactory.createRepository();
+        resourceIdFactory = (new ResourceIdFactoryProvider(localResourceRepository)).createResourceIdFactory();
     }
 
     public void close() {
@@ -56,11 +67,19 @@
         return fileMapManager;
     }
 
-    public IndexRegistry<IIndex> getIndexRegistry() {
-        return indexRegistry;
-    }
-
     public static RuntimeContext get(IHyracksTaskContext ctx) {
         return (RuntimeContext) ctx.getJobletContext().getApplicationContext().getApplicationObject();
     }
+
+    public ILocalResourceRepository getLocalResourceRepository() {
+        return localResourceRepository;
+    }
+
+    public ResourceIdFactory getResourceIdFactory() {
+        return resourceIdFactory;
+    }
+
+    public IIndexLifecycleManager getIndexLifecycleManager() {
+        return lcManager;
+    }
 }
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/StorageManagerInterface.java b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/StorageManagerInterface.java
index 4c6363b..e19847e 100644
--- a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/StorageManagerInterface.java
+++ b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/StorageManagerInterface.java
@@ -19,6 +19,8 @@
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
 import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
 import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.storage.common.file.ILocalResourceRepository;
+import edu.uci.ics.hyracks.storage.common.file.ResourceIdFactory;
 
 public class StorageManagerInterface implements IStorageManagerInterface {
     private static final long serialVersionUID = 1L;
@@ -37,4 +39,14 @@
     public IFileMapProvider getFileMapProvider(IHyracksTaskContext ctx) {
         return RuntimeContext.get(ctx).getFileMapManager();
     }
+
+    @Override
+    public ILocalResourceRepository getLocalResourceRepository(IHyracksTaskContext ctx) {
+        return RuntimeContext.get(ctx).getLocalResourceRepository();
+    }
+
+    @Override
+    public ResourceIdFactory getResourceIdFactory(IHyracksTaskContext ctx) {
+        return RuntimeContext.get(ctx).getResourceIdFactory();
+    }
 }
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/resources/hyracks-deployment.properties b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/resources/hyracks-deployment.properties
deleted file mode 100644
index ab0ecb3..0000000
--- a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/resources/hyracks-deployment.properties
+++ /dev/null
@@ -1 +0,0 @@
-nc.bootstrap.class=edu.uci.ics.hyracks.examples.btree.helper.NCBootstrap
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/btree-example/btreeserver/pom.xml b/hyracks/hyracks-examples/btree-example/btreeserver/pom.xml
new file mode 100644
index 0000000..93b5f4f
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreeserver/pom.xml
@@ -0,0 +1,84 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks.examples.btree</groupId>
+  <artifactId>btreeserver</artifactId>
+  <version>0.2.4-SNAPSHOT</version>
+  <name>btreeserver</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks.examples</groupId>
+    <artifactId>btree-example</artifactId>
+    <version>0.2.4-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>appassembler-maven-plugin</artifactId>
+        <version>1.3</version>
+        <executions>
+          <execution>
+            <configuration>
+              <programs>
+                <program>
+                  <mainClass>edu.uci.ics.hyracks.control.cc.CCDriver</mainClass>
+                  <name>hyrackscc</name>
+                </program>
+                <program>
+                  <mainClass>edu.uci.ics.hyracks.control.nc.NCDriver</mainClass>
+                  <name>hyracksnc</name>
+                </program>
+              </programs>
+              <repositoryLayout>flat</repositoryLayout>
+              <repositoryName>lib</repositoryName>
+            </configuration>
+            <phase>package</phase>
+            <goals>
+              <goal>assemble</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <artifactId>maven-assembly-plugin</artifactId>
+        <version>2.2-beta-5</version>
+        <executions>
+          <execution>
+            <configuration>
+              <descriptors>
+                <descriptor>src/main/assembly/binary-assembly.xml</descriptor>
+              </descriptors>
+            </configuration>
+            <phase>package</phase>
+            <goals>
+              <goal>attached</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks.examples.btree</groupId>
+  		<artifactId>btreehelper</artifactId>
+  		<version>0.2.4-SNAPSHOT</version>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-control-cc</artifactId>
+  		<version>0.2.4-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-control-nc</artifactId>
+  		<version>0.2.4-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  </dependencies>
+</project>
diff --git a/hyracks/hyracks-examples/btree-example/btreeserver/src/main/assembly/binary-assembly.xml b/hyracks/hyracks-examples/btree-example/btreeserver/src/main/assembly/binary-assembly.xml
new file mode 100644
index 0000000..cd598d9
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreeserver/src/main/assembly/binary-assembly.xml
@@ -0,0 +1,23 @@
+<assembly>
+  <id>binary-assembly</id>
+  <formats>
+    <format>zip</format>
+    <format>dir</format>
+  </formats>
+  <includeBaseDirectory>false</includeBaseDirectory>
+  <fileSets>
+    <fileSet>
+      <directory>target/appassembler/bin</directory>
+      <outputDirectory>bin</outputDirectory>
+      <fileMode>0755</fileMode>
+    </fileSet>
+    <fileSet>
+      <directory>target/appassembler/lib</directory>
+      <outputDirectory>lib</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>docs</directory>
+      <outputDirectory>docs</outputDirectory>
+    </fileSet>
+  </fileSets>
+</assembly>
diff --git a/hyracks/hyracks-examples/btree-example/pom.xml b/hyracks/hyracks-examples/btree-example/pom.xml
index b6fbb95..622dc81 100644
--- a/hyracks/hyracks-examples/btree-example/pom.xml
+++ b/hyracks/hyracks-examples/btree-example/pom.xml
@@ -2,7 +2,6 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>edu.uci.ics.hyracks.examples</groupId>
   <artifactId>btree-example</artifactId>
-  <version>0.2.4-SNAPSHOT</version>
   <packaging>pom</packaging>
   <name>btree-example</name>
 
@@ -15,6 +14,6 @@
   <modules>
     <module>btreehelper</module>
     <module>btreeclient</module>
-    <module>btreeapp</module>
+    <module>btreeserver</module>
   </modules>
 </project>
diff --git a/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/src/main/assembly/app-assembly.xml b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/src/main/assembly/app-assembly.xml
deleted file mode 100644
index 43ace6c..0000000
--- a/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/src/main/assembly/app-assembly.xml
+++ /dev/null
@@ -1,13 +0,0 @@
-<assembly>
-  <id>app-assembly</id>
-  <formats>
-    <format>zip</format>
-  </formats>
-  <includeBaseDirectory>false</includeBaseDirectory>
-  <fileSets>
-    <fileSet>
-      <directory>target/application/lib</directory>
-      <outputDirectory>lib</outputDirectory>
-    </fileSet>
-  </fileSets>
-</assembly>
diff --git a/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/conf/local_cluster.conf b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatserver/conf/local_cluster.conf
similarity index 100%
rename from hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/conf/local_cluster.conf
rename to hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatserver/conf/local_cluster.conf
diff --git a/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/data/file1.txt b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatserver/data/file1.txt
similarity index 100%
rename from hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/data/file1.txt
rename to hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatserver/data/file1.txt
diff --git a/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/data/file2.txt b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatserver/data/file2.txt
similarity index 100%
rename from hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/data/file2.txt
rename to hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatserver/data/file2.txt
diff --git a/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/job/wordcount/wordcount.job b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatserver/job/wordcount/wordcount.job
similarity index 100%
rename from hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/job/wordcount/wordcount.job
rename to hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatserver/job/wordcount/wordcount.job
diff --git a/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/pom.xml b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatserver/pom.xml
similarity index 60%
rename from hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/pom.xml
rename to hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatserver/pom.xml
index 6171214..36d612b 100644
--- a/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/pom.xml
+++ b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatserver/pom.xml
@@ -1,8 +1,9 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
   <modelVersion>4.0.0</modelVersion>
   <groupId>edu.uci.ics.hyracks.examples.compat</groupId>
-  <artifactId>hadoopcompatapp</artifactId>
-  <name>hadoopcompatapp</name>
+  <artifactId>hadoopcompatserver</artifactId>
+  <version>0.2.4-SNAPSHOT</version>
+  <name>hadoopcompatserver</name>
 
   <parent>
     <groupId>edu.uci.ics.hyracks.examples</groupId>
@@ -11,49 +12,31 @@
   </parent>
 
   <build>
-    <pluginManagement>
-      <plugins>
-        <plugin>
-          <groupId>org.eclipse.m2e</groupId>
-          <artifactId>lifecycle-mapping</artifactId>
-          <version>1.0.0</version>
-          <configuration>
-            <lifecycleMappingMetadata>
-              <pluginExecutions>
-                <pluginExecution>
-                  <pluginExecutionFilter>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-dependency-plugin</artifactId>
-                    <versionRange>[1.0.0,)</versionRange>
-                    <goals>
-                      <goal>copy-dependencies</goal>
-                    </goals>
-                  </pluginExecutionFilter>
-                  <action>
-                    <ignore />
-                  </action>
-                </pluginExecution>
-              </pluginExecutions>
-            </lifecycleMappingMetadata>
-          </configuration>
-        </plugin>
-      </plugins>
-	</pluginManagement>
-  
     <plugins>
       <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-dependency-plugin</artifactId>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>appassembler-maven-plugin</artifactId>
+        <version>1.3</version>
         <executions>
           <execution>
-            <id>copy-dependencies</id>
+            <configuration>
+              <programs>
+                <program>
+                  <mainClass>edu.uci.ics.hyracks.control.cc.CCDriver</mainClass>
+                  <name>hyrackscc</name>
+                </program>
+                <program>
+                  <mainClass>edu.uci.ics.hyracks.control.nc.NCDriver</mainClass>
+                  <name>hyracksnc</name>
+                </program>
+              </programs>
+              <repositoryLayout>flat</repositoryLayout>
+              <repositoryName>lib</repositoryName>
+            </configuration>
             <phase>package</phase>
             <goals>
-              <goal>copy-dependencies</goal>
+              <goal>assemble</goal>
             </goals>
-            <configuration>
-              <outputDirectory>target/application/lib</outputDirectory>
-            </configuration>
           </execution>
         </executions>
       </plugin>
@@ -64,7 +47,7 @@
           <execution>
             <configuration>
               <descriptors>
-                <descriptor>src/main/assembly/app-assembly.xml</descriptor>
+                <descriptor>src/main/assembly/binary-assembly.xml</descriptor>
               </descriptors>
             </configuration>
             <phase>package</phase>
@@ -79,8 +62,7 @@
       	<artifactId>hyracks-virtualcluster-maven-plugin</artifactId>
       	<version>0.2.4-SNAPSHOT</version>
         <configuration>
-          <hyracksServerHome>${basedir}/../../../hyracks-server/target/hyracks-server-${project.version}-binary-assembly</hyracksServerHome>
-          <hyracksCLIHome>${basedir}/../../../hyracks-cli/target/hyracks-cli-${project.version}-binary-assembly</hyracksCLIHome>
+          <hyracksServerHome>${basedir}/target/hadoopcompatserver-${project.version}-binary-assembly</hyracksServerHome>
           <jvmOptions>${jvm.extraargs}</jvmOptions>
         </configuration>
         <executions>
@@ -90,9 +72,6 @@
             <goals>
               <goal>start-cc</goal>
             </goals>
-            <configuration>
-	      <workingDir>${project.build.directory}</workingDir>
-            </configuration>
           </execution>
           <execution>
             <id>hyracks-nc1-start</id>
@@ -104,7 +83,6 @@
               <nodeId>NC1</nodeId>
               <dataIpAddress>127.0.0.1</dataIpAddress>
               <ccHost>localhost</ccHost>
-	      <workingDir>${project.build.directory}</workingDir>
             </configuration>
           </execution>
           <execution>
@@ -117,38 +95,24 @@
               <nodeId>NC2</nodeId>
               <dataIpAddress>127.0.0.1</dataIpAddress>
               <ccHost>localhost</ccHost>
-	      <workingDir>${project.build.directory}</workingDir>
             </configuration>
           </execution>
           <execution>
-            <id>deploy-app</id>
-            <phase>pre-integration-test</phase>
+            <id>stop-services</id>
+            <phase>post-integration-test</phase>
             <goals>
-              <goal>deploy-app</goal>
+              <goal>stop-services</goal>
             </goals>
-            <configuration>
-              <ccHost>localhost</ccHost>
-              <appName>compat</appName>
-              <harFile>${project.build.directory}/hadoopcompatapp-${project.version}-app-assembly.zip</harFile>
-            </configuration>
           </execution>
-	     <execution>
-	       <id>stop-services</id>
-	       <phase>post-integration-test</phase>
-	       <goals>
-	         <goal>stop-services</goal>
-	       </goals>
-             </execution>
-          </executions>
+        </executions>
       </plugin>
       <plugin>
       	<groupId>org.apache.maven.plugins</groupId>
       	<artifactId>maven-compiler-plugin</artifactId>
       	<version>2.0.2</version>
         <configuration>
-          <source>1.7</source>
-          <target>1.7</target>
-          <fork>true</fork>
+          <source>1.6</source>
+          <target>1.6</target>
         </configuration>
       </plugin>
       <plugin>
@@ -174,6 +138,20 @@
         <version>0.2.4-SNAPSHOT</version>
         <scope>compile</scope>
      </dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-control-cc</artifactId>
+  		<version>0.2.4-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-control-nc</artifactId>
+  		<version>0.2.4-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
      <dependency>
         <groupId>edu.uci.ics.hyracks.examples.compat</groupId>
   	    <artifactId>hadoopcompatclient</artifactId>
diff --git a/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatserver/src/main/assembly/binary-assembly.xml b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatserver/src/main/assembly/binary-assembly.xml
new file mode 100644
index 0000000..cd598d9
--- /dev/null
+++ b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatserver/src/main/assembly/binary-assembly.xml
@@ -0,0 +1,23 @@
+<assembly>
+  <id>binary-assembly</id>
+  <formats>
+    <format>zip</format>
+    <format>dir</format>
+  </formats>
+  <includeBaseDirectory>false</includeBaseDirectory>
+  <fileSets>
+    <fileSet>
+      <directory>target/appassembler/bin</directory>
+      <outputDirectory>bin</outputDirectory>
+      <fileMode>0755</fileMode>
+    </fileSet>
+    <fileSet>
+      <directory>target/appassembler/lib</directory>
+      <outputDirectory>lib</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>docs</directory>
+      <outputDirectory>docs</outputDirectory>
+    </fileSet>
+  </fileSets>
+</assembly>
diff --git a/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/src/test/java/edu/uci/ics/hyracks/examples/compat/test/WordCountCompatibilityIT.java b/hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatserver/src/test/java/edu/uci/ics/hyracks/examples/compat/test/WordCountCompatibilityIT.java
similarity index 100%
rename from hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatapp/src/test/java/edu/uci/ics/hyracks/examples/compat/test/WordCountCompatibilityIT.java
rename to hyracks/hyracks-examples/hadoop-compat-example/hadoopcompatserver/src/test/java/edu/uci/ics/hyracks/examples/compat/test/WordCountCompatibilityIT.java
diff --git a/hyracks/hyracks-examples/hadoop-compat-example/pom.xml b/hyracks/hyracks-examples/hadoop-compat-example/pom.xml
index 9d07031..b4ba2a3 100644
--- a/hyracks/hyracks-examples/hadoop-compat-example/pom.xml
+++ b/hyracks/hyracks-examples/hadoop-compat-example/pom.xml
@@ -14,7 +14,7 @@
   <modules>
     <module>hadoopcompathelper</module>
     <module>hadoopcompatclient</module>
-    <module>hadoopcompatapp</module>
+    <module>hadoopcompatserver</module>
   </modules>
 
   <dependencies>
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/data/orders-with-locations.txt b/hyracks/hyracks-examples/hyracks-integration-tests/data/orders-with-locations-part1.txt
similarity index 74%
rename from hyracks/hyracks-examples/hyracks-integration-tests/data/orders-with-locations.txt
rename to hyracks/hyracks-examples/hyracks-integration-tests/data/orders-with-locations-part1.txt
index f936a6e..e1d423f 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/data/orders-with-locations.txt
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/data/orders-with-locations-part1.txt
@@ -554,197 +554,4 @@
 2210|32|F|31689.46|1992-01-16|2-HIGH|Clerk#000000941|0| believe carefully quickly express pinto beans. deposi|61.1571|-149.883|61.1571|-149.883|
 2211|92|F|140031.23|1994-06-30|2-HIGH|Clerk#000000464|0|ffily bold courts e|61.1541|-149.958|61.1541|-149.958|
 2212|118|F|17231.05|1994-03-23|3-MEDIUM|Clerk#000000954|0|structions above the unusual requests use fur|61.135|-149.88|61.135|-149.88|
-2213|122|F|146136.10|1993-01-15|4-NOT SPECIFIED|Clerk#000000598|0|osits are carefully reg|61.1101|-149.857|61.1101|-149.857|
-2214|115|O|150345.63|1998-05-05|3-MEDIUM|Clerk#000000253|0|packages. fluffily even accounts haggle blithely. carefully ironic depen|61.1101|-149.857|61.1101|-149.857|
-2215|40|O|108239.46|1996-06-16|4-NOT SPECIFIED|Clerk#000000817|0|le final, final foxes. quickly regular gifts are carefully deposit|61.1101|-149.857|61.1101|-149.857|
-2240|56|F|174090.30|1992-03-06|4-NOT SPECIFIED|Clerk#000000622|0|accounts against the slyly express foxes are after the slyly regular |61.1101|-149.857|61.1101|-149.857|
-2241|103|F|165219.08|1993-05-11|1-URGENT|Clerk#000000081|0|y about the silent excuses. furiously ironic instructions along the sil|61.1101|-149.857|61.1101|-149.857|
-2242|82|O|15082.82|1997-07-20|4-NOT SPECIFIED|Clerk#000000360|0| pending multipliers. carefully express asymptotes use quickl|61.1101|-149.857|61.1101|-149.857|
-2243|49|O|10451.97|1995-06-10|2-HIGH|Clerk#000000813|0|ously regular deposits integrate s|61.1101|-149.857|61.1101|-149.857|
-2244|127|F|21207.08|1993-01-09|1-URGENT|Clerk#000001000|0|ckages. ironic, ironic accounts haggle blithely express excuses. |61.1101|-149.857|61.1101|-149.857|
-2245|58|F|150585.73|1993-04-28|3-MEDIUM|Clerk#000000528|0|ake carefully. braids haggle slyly quickly b|61.1101|-149.857|61.1101|-149.857|
-2246|113|O|85755.84|1996-05-27|4-NOT SPECIFIED|Clerk#000000739|0| final gifts sleep |61.1101|-149.857|61.1101|-149.857|
-2247|95|F|13491.31|1992-08-02|4-NOT SPECIFIED|Clerk#000000947|0|furiously regular packages. final brai|61.1101|-149.857|61.1101|-149.857|
-2272|139|F|127934.71|1993-04-13|2-HIGH|Clerk#000000449|0|s. bold, ironic pinto beans wake. silently specia|61.1101|-149.857|61.1101|-149.857|
-2273|136|O|142291.79|1996-12-14|5-LOW|Clerk#000000155|0|uickly express foxes haggle quickly against|61.1101|-149.857|61.1101|-149.857|
-2274|104|F|58273.89|1993-09-04|4-NOT SPECIFIED|Clerk#000000258|0|nstructions try to hag|61.1101|-149.857|61.1101|-149.857|
-2275|149|F|37398.90|1992-10-22|4-NOT SPECIFIED|Clerk#000000206|0| furiously furious platelets. slyly final packa|61.1101|-149.857|61.1101|-149.857|
-2276|43|O|141159.63|1996-04-29|4-NOT SPECIFIED|Clerk#000000821|0|ecial requests. fox|61.1101|-149.857|61.1101|-149.857|
-2277|89|F|79270.23|1995-01-02|4-NOT SPECIFIED|Clerk#000000385|0|accounts cajole. even i|61.1101|-149.857|61.1101|-149.857|
-2278|142|O|101878.46|1998-04-25|3-MEDIUM|Clerk#000000186|0|r pinto beans integrate after the carefully even deposits. blit|61.1101|-149.857|61.1101|-149.857|
-2279|80|F|142322.33|1993-02-23|3-MEDIUM|Clerk#000000898|0|de of the quickly unusual instructio|61.2141|-149.864|61.2141|-149.864|
-2304|46|F|93769.28|1994-01-07|4-NOT SPECIFIED|Clerk#000000415|0|onic platelets. ironic packages haggle. packages nag doggedly according to|61.2171|-149.9|61.2171|-149.9|
-2305|43|F|122964.66|1993-01-26|2-HIGH|Clerk#000000440|0|ove the furiously even acco|61.2171|-149.9|61.2171|-149.9|
-2306|28|O|244704.23|1995-07-26|2-HIGH|Clerk#000000975|0| wake furiously requests. permanent requests affix. final packages caj|61.2171|-149.9|61.2171|-149.9|
-2307|106|F|59417.76|1993-06-29|5-LOW|Clerk#000000952|0|furiously even asymptotes? carefully regular accounts|61.2171|-149.9|61.2171|-149.9|
-2308|25|F|58546.02|1992-10-25|4-NOT SPECIFIED|Clerk#000000609|0|ts. slyly final depo|61.2171|-149.9|61.2171|-149.9|
-2309|100|O|146933.07|1995-09-04|5-LOW|Clerk#000000803|0|he carefully pending packages. fluffily stealthy foxes engage carefully|61.2171|-149.9|61.2171|-149.9|
-2310|31|O|82928.12|1996-09-20|5-LOW|Clerk#000000917|0|wake carefully. unusual instructions nag ironic, regular excuse|61.2171|-149.9|61.2171|-149.9|
-2311|73|P|153233.93|1995-05-02|2-HIGH|Clerk#000000761|0|ly pending asymptotes-- furiously bold excus|61.2171|-149.9|61.2171|-149.9|
-2336|142|O|22294.51|1996-01-07|4-NOT SPECIFIED|Clerk#000000902|0|c, final excuses sleep furiously among the even theodolites. f|61.2171|-149.9|61.2171|-149.9|
-2337|142|O|45704.96|1997-06-18|4-NOT SPECIFIED|Clerk#000000754|0| quickly. final accounts haggle. carefully final acco|61.2171|-149.9|61.2171|-149.9|
-2338|140|O|28155.92|1997-09-15|2-HIGH|Clerk#000000951|0|riously final dugouts. final, ironic packages wake express, ironic id|61.2171|-149.9|61.2171|-149.9|
-2339|109|F|63470.78|1993-12-15|5-LOW|Clerk#000000847|0| against the regular |61.2171|-149.9|61.2171|-149.9|
-2340|65|O|30778.78|1996-01-12|1-URGENT|Clerk#000000964|0|ter the deposits sleep according to the slyly regular packages. carefully |61.2171|-149.9|61.2171|-149.9|
-2341|82|F|55950.21|1993-05-30|5-LOW|Clerk#000000443|0|sts-- blithely bold dolphins through the deposits nag blithely carefully re|61.2171|-149.9|61.2171|-149.9|
-2342|37|O|104038.78|1996-06-09|1-URGENT|Clerk#000000615|0|oost carefully across the regular accounts. blithely final d|61.2171|-149.9|61.2171|-149.9|
-2343|73|O|85381.00|1995-08-21|3-MEDIUM|Clerk#000000170|0|fluffily over the slyly special deposits. quickl|64.8487|-147.704|64.8487|-147.704|
-2368|13|F|101240.96|1993-08-20|1-URGENT|Clerk#000000830|0|t the bold instructions. carefully unusual |64.8486|-147.705|64.8486|-147.705|
-2369|110|O|73517.91|1996-12-24|2-HIGH|Clerk#000000752|0|iously even requests are dogged, express |64.8087|-147.71|64.8087|-147.71|
-2370|142|F|73924.21|1994-01-17|1-URGENT|Clerk#000000231|0|lyly final packages. quickly final deposits haggl|64.8363|-147.758|64.8363|-147.758|
-2371|19|O|193857.67|1998-01-07|1-URGENT|Clerk#000000028|0|ckages haggle at th|64.8476|-147.704|64.8476|-147.704|
-2372|31|O|104927.66|1997-11-21|5-LOW|Clerk#000000342|0|s: deposits haggle along the final ideas. careful|64.8302|-147.744|64.8302|-147.744|
-2373|28|F|55211.04|1994-03-12|4-NOT SPECIFIED|Clerk#000000306|0| even, special courts grow quickly. pending,|64.8476|-147.812|64.8476|-147.812|
-2374|4|F|115219.88|1993-10-29|4-NOT SPECIFIED|Clerk#000000081|0| blithely regular packages. blithely unusua|64.8144|-147.756|64.8144|-147.756|
-2375|5|O|106612.48|1996-11-20|3-MEDIUM|Clerk#000000197|0|unusual, pending theodolites cajole carefully |64.8183|-147.778|64.8183|-147.778|
-2400|37|O|92798.66|1998-07-25|5-LOW|Clerk#000000782|0|nusual courts nag against the carefully unusual pinto b|64.8494|-147.818|64.8494|-147.818|
-2401|148|O|88448.24|1997-07-29|4-NOT SPECIFIED|Clerk#000000531|0|ully unusual instructions boost carefully silently regular requests. |64.849|-147.822|64.849|-147.822|
-2402|67|O|70403.62|1996-09-06|4-NOT SPECIFIED|Clerk#000000162|0|slyly final sheaves sleep slyly. q|64.8367|-147.716|64.8367|-147.716|
-2403|55|O|111020.79|1998-04-11|3-MEDIUM|Clerk#000000820|0|furiously regular deposits use. furiously unusual accounts wake along the |64.8127|-147.772|64.8127|-147.772|
-2404|77|O|109077.69|1997-03-13|4-NOT SPECIFIED|Clerk#000000409|0|deposits breach furiously. ironic foxes haggle carefully bold packag|64.8143|-147.751|64.8143|-147.751|
-2405|73|O|115929.14|1996-12-23|3-MEDIUM|Clerk#000000535|0|ular, regular asympto|64.842|-147.721|64.842|-147.721|
-2406|7|O|182516.77|1996-10-28|5-LOW|Clerk#000000561|0|blithely regular accounts u|64.8403|-147.714|64.8403|-147.714|
-2407|55|O|112843.52|1998-06-19|2-HIGH|Clerk#000000068|0|uests affix slyly among the slyly regular depos|64.8371|-147.881|64.8371|-147.881|
-2432|103|O|62661.93|1996-07-13|1-URGENT|Clerk#000000115|0|re. slyly even deposits wake bra|64.8151|-147.707|64.8151|-147.707|
-2433|31|F|147071.86|1994-08-22|4-NOT SPECIFIED|Clerk#000000324|0|ess patterns are slyly. packages haggle carefu|64.8151|-147.707|64.8151|-147.707|
-2434|25|O|123956.25|1997-04-27|3-MEDIUM|Clerk#000000190|0|s. quickly ironic dolphins impress final deposits. blithel|64.8541|-147.81|64.8541|-147.81|
-2435|73|F|122490.66|1993-02-21|5-LOW|Clerk#000000112|0|es are carefully along the carefully final instructions. pe|64.8878|-147.496|64.8878|-147.496|
-2436|125|O|73990.08|1995-09-11|4-NOT SPECIFIED|Clerk#000000549|0|arefully. blithely bold deposits affix special accounts. final foxes nag. spe|64.8299|-147.728|64.8299|-147.728|
-2437|85|F|143411.69|1993-04-21|4-NOT SPECIFIED|Clerk#000000578|0|. theodolites wake slyly-- ironic, pending platelets above the carefully exp|64.8132|-147.762|64.8132|-147.762|
-2438|13|F|214494.39|1993-07-15|2-HIGH|Clerk#000000744|0|the final, regular warhorses. regularly |64.8372|-147.713|64.8372|-147.713|
-2439|55|O|41811.12|1997-03-15|2-HIGH|Clerk#000000819|0|lithely after the car|64.7927|-148.036|64.7927|-148.036|
-2464|145|O|30495.65|1997-11-23|5-LOW|Clerk#000000633|0|le about the instructions. courts wake carefully even|64.8717|-147.819|64.8717|-147.819|
-2465|34|O|180737.75|1995-06-24|1-URGENT|Clerk#000000078|0|al pinto beans. final, bold packages wake quickly|64.8527|-147.686|64.8527|-147.686|
-2466|19|F|161625.50|1994-03-06|1-URGENT|Clerk#000000424|0|c pinto beans. express deposits wake quickly. even, final courts nag. package|64.8371|-147.811|64.8371|-147.811|
-2467|35|O|7231.91|1995-07-16|4-NOT SPECIFIED|Clerk#000000914|0|pades sleep furiously. sometimes regular packages again|64.846|-147.705|64.846|-147.705|
-2468|112|O|160627.01|1997-06-09|4-NOT SPECIFIED|Clerk#000000260|0|ickly regular packages. slyly ruthless requests snooze quickly blithe|64.9064|-147.726|64.9064|-147.726|
-2469|124|O|192074.23|1996-11-26|5-LOW|Clerk#000000730|0| sleep closely regular instructions. furiously ironic instructi|64.9347|-147.56|64.9347|-147.56|
-2470|58|O|104966.33|1997-04-19|3-MEDIUM|Clerk#000000452|0|to the furiously final packages? pa|64.8861|-147.677|64.8861|-147.677|
-2471|89|O|34936.31|1998-03-12|4-NOT SPECIFIED|Clerk#000000860|0|carefully blithely regular pac|64.8302|-147.744|64.8302|-147.744|
-2496|136|F|140390.60|1994-01-09|2-HIGH|Clerk#000000142|0|slyly. pending instructions sleep. quic|60.6673|-151.311|60.6673|-151.311|
-2497|47|F|171326.48|1992-08-27|1-URGENT|Clerk#000000977|0|ily ironic pinto beans. furiously final platelets alongside of t|60.6997|-151.38|60.6997|-151.38|
-2498|97|F|45514.27|1993-11-08|5-LOW|Clerk#000000373|0|g the slyly special pinto beans. |60.5658|-151.244|60.5658|-151.244|
-2499|121|O|147243.86|1995-09-24|1-URGENT|Clerk#000000277|0|r the quickly bold foxes. bold instructi|60.6331|-151.163|60.6331|-151.163|
-2500|133|F|131122.82|1992-08-15|2-HIGH|Clerk#000000447|0|integrate slyly pending deposits. furiously ironic accounts across the s|60.6331|-151.163|60.6331|-151.163|
-2501|67|O|79380.51|1997-05-25|5-LOW|Clerk#000000144|0|ickly special theodolite|60.6331|-151.163|60.6331|-151.163|
-2502|70|F|33470.40|1993-05-28|4-NOT SPECIFIED|Clerk#000000914|0|lyly: carefully pending ideas affix again|60.6201|-151.332|60.6201|-151.332|
-2503|7|F|183671.08|1993-06-20|3-MEDIUM|Clerk#000000294|0|ly even packages was. ironic, regular deposits unwind furiously across the p|60.5004|-151.276|60.5004|-151.276|
-2528|55|F|92069.62|1994-11-20|1-URGENT|Clerk#000000789|0|ular dependencies? regular frays kindle according to the blith|60.6331|-151.163|60.6331|-151.163|
-2529|136|O|4104.30|1996-08-20|2-HIGH|Clerk#000000511|0|posits across the silent instructions wake blithely across |60.6331|-151.163|60.6331|-151.163|
-2530|128|F|58853.11|1994-03-21|3-MEDIUM|Clerk#000000291|0|ular instructions about the quic|60.6901|-151.321|60.6901|-151.321|
-2531|44|O|143212.85|1996-05-06|4-NOT SPECIFIED|Clerk#000000095|0|even accounts. furiously ironic excuses sleep fluffily. carefully silen|60.6676|-151.29|60.6676|-151.29|
-2532|94|O|116093.49|1995-10-11|2-HIGH|Clerk#000000498|0|the blithely pending accounts. regular, regular excuses boost aro|60.6331|-151.163|60.6331|-151.163|
-2533|50|O|168495.03|1997-03-24|1-URGENT|Clerk#000000594|0|ecial instructions. spec|60.5632|-151.266|60.5632|-151.266|
-2534|76|O|202784.54|1996-07-17|3-MEDIUM|Clerk#000000332|0|packages cajole ironic requests. furiously regular|60.6331|-151.163|60.6331|-151.163|
-2535|121|F|67018.30|1993-05-25|5-LOW|Clerk#000000296|0|phins cajole beneath the fluffily express asymptotes. c|60.6331|-151.163|60.6331|-151.163|
-2560|131|F|153426.79|1992-09-05|1-URGENT|Clerk#000000538|0|atelets; quickly sly requests|60.6509|-151.342|60.6509|-151.342|
-2561|58|O|137473.58|1997-11-14|1-URGENT|Clerk#000000861|0|ual requests. unusual deposits cajole furiously pending, regular platelets. |60.5601|-151.107|60.5601|-151.107|
-2562|10|F|136360.37|1992-08-01|1-URGENT|Clerk#000000467|0|elets. pending dolphins promise slyly. bo|60.5123|-151.275|60.5123|-151.275|
-2563|62|F|168952.10|1993-11-19|4-NOT SPECIFIED|Clerk#000000150|0|sly even packages after the furio|60.6076|-151.325|60.6076|-151.325|
-2564|77|F|3967.47|1994-09-09|2-HIGH|Clerk#000000718|0|usly regular pinto beans. orbits wake carefully. slyly e|60.6331|-151.163|60.6331|-151.163|
-2565|56|O|204438.57|1998-02-28|3-MEDIUM|Clerk#000000032|0|x-ray blithely along|60.5175|-151.235|60.5175|-151.235|
-2566|86|F|89992.48|1992-10-10|3-MEDIUM|Clerk#000000414|0|ructions boost bold ideas. idly ironic accounts use according to th|60.5535|-151.108|60.5535|-151.108|
-2567|70|O|263411.29|1998-02-27|2-HIGH|Clerk#000000031|0|detect. furiously ironic requests|60.5614|-151.275|60.5614|-151.275|
-2592|101|F|8225.96|1993-03-05|4-NOT SPECIFIED|Clerk#000000524|0|ts nag fluffily. quickly stealthy theodolite|60.5647|-151.195|60.5647|-151.195|
-2593|92|F|134726.09|1993-09-04|2-HIGH|Clerk#000000468|0|r the carefully final|60.6331|-151.163|60.6331|-151.163|
-2594|79|F|94866.39|1992-12-17|1-URGENT|Clerk#000000550|0|ests. theodolites above the blithely even accounts detect furio|60.6331|-151.163|60.6331|-151.163|
-2595|74|O|173130.20|1995-12-14|4-NOT SPECIFIED|Clerk#000000222|0|arefully ironic requests nag carefully ideas. |60.6331|-151.163|60.6331|-151.163|
-2596|43|O|74940.13|1996-08-17|1-URGENT|Clerk#000000242|0|requests. ironic, bold theodolites wak|60.6331|-151.163|60.6331|-151.163|
-2597|104|F|21964.66|1993-02-04|2-HIGH|Clerk#000000757|0|iously ruthless exc|60.6331|-151.163|60.6331|-151.163|
-2598|112|O|84871.50|1996-03-05|3-MEDIUM|Clerk#000000391|0| ironic notornis according to the blithely final requests should |60.6678|-151.31|60.6678|-151.31|
-2599|149|O|62807.13|1996-11-07|2-HIGH|Clerk#000000722|0|ts. slyly regular theodolites wake sil|60.5003|-151.276|60.5003|-151.276|
-2624|52|O|27148.63|1996-11-28|5-LOW|Clerk#000000930|0|ic, regular packages|60.6331|-151.163|60.6331|-151.163|
-2625|40|F|39382.74|1992-10-14|4-NOT SPECIFIED|Clerk#000000386|0| final deposits. blithely ironic ideas |61.5855|-149.326|61.5855|-149.326|
-2626|139|O|84314.51|1995-09-08|4-NOT SPECIFIED|Clerk#000000289|0|gside of the carefully special packages are furiously after the slyly express |61.5979|-149.437|61.5979|-149.437|
-2627|149|F|26798.65|1992-03-24|3-MEDIUM|Clerk#000000181|0|s. silent, ruthless requests|61.6141|-149.457|61.6141|-149.457|
-2628|56|F|165655.99|1993-10-22|5-LOW|Clerk#000000836|0|ajole across the blithely careful accounts. blithely silent deposits sl|61.5799|-149.461|61.5799|-149.461|
-2629|139|O|96458.03|1998-04-06|5-LOW|Clerk#000000680|0|uches dazzle carefully even, express excuses. ac|61.5845|-149.337|61.5845|-149.337|
-2630|85|F|127132.51|1992-10-24|5-LOW|Clerk#000000712|0|inal theodolites. ironic instructions s|61.5351|-149.558|61.5351|-149.558|
-2631|37|F|63103.32|1993-09-24|5-LOW|Clerk#000000833|0| quickly unusual deposits doubt around |61.5811|-149.45|61.5811|-149.45|
-2656|77|F|105492.37|1993-05-04|1-URGENT|Clerk#000000307|0|elets. slyly final accou|61.5793|-149.442|61.5793|-149.442|
-2657|25|O|148176.06|1995-10-17|2-HIGH|Clerk#000000160|0| foxes-- slyly final dependencies around the slyly final theodo|61.5661|-149.313|61.5661|-149.313|
-2658|14|O|163834.46|1995-09-23|3-MEDIUM|Clerk#000000400|0|bout the slyly regular accounts. ironic, |61.6141|-149.457|61.6141|-149.457|
-2659|83|F|79785.52|1993-12-18|4-NOT SPECIFIED|Clerk#000000758|0|cross the pending requests maintain |61.5786|-149.332|61.5786|-149.332|
-2660|127|O|16922.51|1995-08-05|5-LOW|Clerk#000000480|0|ly finally regular deposits. ironic theodolites cajole|61.5811|-149.45|61.5811|-149.45|
-2661|74|O|106036.84|1997-01-04|3-MEDIUM|Clerk#000000217|0|al, regular pinto beans. silently final deposits should have t|61.5825|-149.429|61.5825|-149.429|
-2662|37|O|87689.88|1996-08-21|3-MEDIUM|Clerk#000000589|0|bold pinto beans above the slyly final accounts affix furiously deposits. pac|61.6141|-149.457|61.6141|-149.457|
-2663|95|O|35131.80|1995-09-06|1-URGENT|Clerk#000000950|0|ar requests. furiously final dolphins along the fluffily spe|61.5531|-149.651|61.5531|-149.651|
-2688|98|F|181077.36|1992-01-24|2-HIGH|Clerk#000000720|0|have to nag according to the pending theodolites. sly|61.5531|-149.651|61.5531|-149.651|
-2689|103|F|41552.78|1992-04-09|4-NOT SPECIFIED|Clerk#000000698|0|press pains wake. furiously express theodolites alongsid|61.5698|-149.62|61.5698|-149.62|
-2690|94|O|224674.27|1996-03-31|3-MEDIUM|Clerk#000000760|0|ravely even theodolites |61.6141|-149.457|61.6141|-149.457|
-2691|7|F|30137.17|1992-04-30|5-LOW|Clerk#000000439|0|es at the regular deposits sleep slyly by the fluffy requests. eve|61.5474|-149.458|61.5474|-149.458|
-2692|62|O|24265.24|1997-12-02|3-MEDIUM|Clerk#000000878|0|es. regular asymptotes cajole above t|61.5825|-149.429|61.5825|-149.429|
-2693|19|O|66158.13|1996-09-04|1-URGENT|Clerk#000000370|0|ndle never. blithely regular packages nag carefully enticing platelets. ca|61.5955|-149.423|61.5955|-149.423|
-2694|121|O|102807.59|1996-03-14|5-LOW|Clerk#000000722|0| requests. bold deposits above the theodol|61.5801|-149.461|61.5801|-149.461|
-2695|58|O|138584.20|1996-08-20|1-URGENT|Clerk#000000697|0|ven deposits around the quickly regular packa|61.5785|-149.415|61.5785|-149.415|
-2720|31|F|161307.05|1993-06-08|1-URGENT|Clerk#000000948|0|quickly. special asymptotes are fluffily ironi|61.6402|-149.34|61.6402|-149.34|
-2721|79|O|59180.25|1996-01-27|2-HIGH|Clerk#000000401|0| ideas eat even, unusual ideas. theodolites are carefully|61.583|-149.457|61.583|-149.457|
-2722|35|F|50328.84|1994-04-09|5-LOW|Clerk#000000638|0|rding to the carefully quick deposits. bli|61.5907|-149.295|61.5907|-149.295|
-2723|61|O|104759.25|1995-10-06|5-LOW|Clerk#000000836|0|nts must have to cajo|61.6141|-149.457|61.6141|-149.457|
-2724|137|F|116069.66|1994-09-14|2-HIGH|Clerk#000000217|0| sleep blithely. blithely idle |61.5933|-149.397|61.5933|-149.397|
-2725|89|F|75144.68|1994-05-21|4-NOT SPECIFIED|Clerk#000000835|0|ular deposits. spec|61.6091|-149.77|61.6091|-149.77|
-2726|7|F|47753.00|1992-11-27|5-LOW|Clerk#000000470|0| blithely even dinos sleep care|61.577|-149.411|61.577|-149.411|
-2727|74|O|3089.42|1998-04-19|4-NOT SPECIFIED|Clerk#000000879|0|sual theodolites cajole enticingly above the furiously fin|61.6078|-149.322|61.6078|-149.322|
-2752|59|F|187932.30|1993-11-19|2-HIGH|Clerk#000000648|0| carefully regular foxes are quickly quickl|61.6131|-149.397|61.6131|-149.397|
-2753|16|F|159720.39|1993-11-30|2-HIGH|Clerk#000000380|0|ending instructions. unusual deposits|61.6648|-149.372|61.6648|-149.372|
-2754|145|F|25985.52|1994-04-03|2-HIGH|Clerk#000000960|0|cies detect slyly. |61.5531|-149.651|61.5531|-149.651|
-2755|118|F|101202.18|1992-02-07|4-NOT SPECIFIED|Clerk#000000177|0|ously according to the sly foxes. blithely regular pinto bean|61.5811|-149.45|61.5811|-149.45|
-2756|118|F|142323.38|1994-04-18|1-URGENT|Clerk#000000537|0|arefully special warho|61.583|-149.457|61.583|-149.457|
-2757|76|O|89792.48|1995-07-20|2-HIGH|Clerk#000000216|0| regular requests subl|61.1955|-149.9|61.1955|-149.9|
-2758|43|O|36671.88|1998-07-12|5-LOW|Clerk#000000863|0|s cajole according to the carefully special |61.1844|-149.897|61.1844|-149.897|
-2759|116|F|89731.10|1993-11-25|4-NOT SPECIFIED|Clerk#000000071|0|ts. regular, pending pinto beans sleep ab|61.1901|-149.892|61.1901|-149.892|
-2784|95|O|106635.21|1998-01-07|1-URGENT|Clerk#000000540|0|g deposits alongside of the silent requests s|61.1444|-149.867|61.1444|-149.867|
-2785|148|O|132854.79|1995-07-21|2-HIGH|Clerk#000000098|0|iously pending packages sleep according to the blithely unusual foxe|61.1955|-149.9|61.1955|-149.9|
-2786|79|F|178254.66|1992-03-22|2-HIGH|Clerk#000000976|0|al platelets cajole blithely ironic requests. ironic re|61.1893|-149.887|61.1893|-149.887|
-2787|103|O|3726.14|1995-09-30|1-URGENT|Clerk#000000906|0|he ironic, regular |61.2174|-149.888|61.2174|-149.888|
-2788|124|F|17172.66|1994-09-22|1-URGENT|Clerk#000000641|0|nts wake across the fluffily bold accoun|61.2227|-149.842|61.2227|-149.842|
-2789|37|O|219123.27|1998-03-14|2-HIGH|Clerk#000000972|0|gular patterns boost. carefully even re|61.1263|-149.872|61.1263|-149.872|
-2790|25|F|177458.97|1994-08-19|2-HIGH|Clerk#000000679|0| the carefully express deposits sleep slyly |61.1138|-149.866|61.1138|-149.866|
-2791|121|F|156697.55|1994-10-10|2-HIGH|Clerk#000000662|0|as. slyly ironic accounts play furiously bl|61.2157|-149.821|61.2157|-149.821|
-2816|58|F|42225.53|1994-09-20|2-HIGH|Clerk#000000289|0|kages at the final deposits cajole furious foxes. quickly |61.2174|-149.888|61.2174|-149.888|
-2817|40|F|71453.85|1994-04-19|3-MEDIUM|Clerk#000000982|0|ic foxes haggle upon the daringly even pinto beans. slyly|61.1855|-149.868|61.1855|-149.868|
-2818|49|F|120086.84|1994-12-12|3-MEDIUM|Clerk#000000413|0|eep furiously special ideas. express |61.1951|-149.873|61.1951|-149.873|
-2819|103|F|66927.16|1994-05-05|1-URGENT|Clerk#000000769|0|ngside of the blithely ironic dolphins. furio|61.1444|-149.867|61.1444|-149.867|
-2820|19|F|143813.39|1994-05-20|3-MEDIUM|Clerk#000000807|0|equests are furiously. carefu|61.1883|-149.735|61.1883|-149.735|
-2821|118|F|36592.48|1993-08-09|3-MEDIUM|Clerk#000000323|0|ng requests. even instructions are quickly express, silent instructi|61.2161|-149.876|61.2161|-149.876|
-2822|79|F|40142.15|1993-07-26|2-HIGH|Clerk#000000510|0|furiously against the accounts. unusual accounts aft|61.2161|-149.876|61.2161|-149.876|
-2823|79|O|171894.45|1995-09-09|2-HIGH|Clerk#000000567|0|encies. carefully fluffy accounts m|61.1893|-149.888|61.1893|-149.888|
-2848|70|F|116258.53|1992-03-10|1-URGENT|Clerk#000000256|0|ly fluffy foxes sleep furiously across the slyly regu|61.2174|-149.888|61.2174|-149.888|
-2849|46|O|180054.29|1996-04-30|2-HIGH|Clerk#000000659|0|al packages are after the quickly bold requests. carefully special |61.1914|-149.886|61.1914|-149.886|
-2850|100|O|122969.79|1996-10-02|2-HIGH|Clerk#000000392|0|, regular deposits. furiously pending packages hinder carefully carefully u|61.1541|-149.958|61.1541|-149.958|
-2851|145|O|7859.36|1997-09-07|5-LOW|Clerk#000000566|0|Tiresias wake quickly quickly even|61.1259|-149.717|61.1259|-149.717|
-2852|91|F|99050.81|1993-01-16|1-URGENT|Clerk#000000740|0|ruthless deposits against the final instructions use quickly al|61.2193|-149.902|61.2193|-149.902|
-2853|94|F|103641.15|1994-05-05|2-HIGH|Clerk#000000878|0|the carefully even packages.|61.1879|-149.886|61.1879|-149.886|
-2854|139|F|153568.02|1994-06-27|1-URGENT|Clerk#000000010|0| furiously ironic tithes use furiously |61.1372|-149.912|61.1372|-149.912|
-2855|49|F|48419.58|1993-04-04|4-NOT SPECIFIED|Clerk#000000973|0| silent, regular packages sleep |61.1101|-149.857|61.1101|-149.857|
-2880|8|F|145761.99|1992-03-15|2-HIGH|Clerk#000000756|0|ves maintain doggedly spec|61.1791|-149.94|61.1791|-149.94|
-2881|100|F|45695.84|1992-05-10|5-LOW|Clerk#000000864|0|uriously. slyly express requests according to the silent dol|61.2031|-149.749|61.2031|-149.749|
-2882|121|O|172872.37|1995-08-22|2-HIGH|Clerk#000000891|0|pending deposits. carefully eve|61.1914|-149.877|61.1914|-149.877|
-2883|121|F|170360.27|1995-01-23|5-LOW|Clerk#000000180|0|uses. carefully ironic accounts lose fluffil|61.1944|-149.883|61.1944|-149.883|
-2884|92|O|71683.84|1997-10-12|3-MEDIUM|Clerk#000000780|0|efully express instructions sleep against|61.1923|-149.886|61.1923|-149.886|
-2885|7|F|146896.72|1992-09-19|4-NOT SPECIFIED|Clerk#000000280|0|ly sometimes special excuses. final requests are |61.2123|-149.854|61.2123|-149.854|
-2886|109|F|94527.23|1994-11-13|4-NOT SPECIFIED|Clerk#000000619|0|uctions. ironic packages sle|61.2161|-149.876|61.2161|-149.876|
-2887|109|O|28571.39|1997-05-26|5-LOW|Clerk#000000566|0|slyly even pinto beans. slyly bold epitaphs cajole blithely above t|61.2171|-149.9|61.2171|-149.9|
-2912|94|F|27727.52|1992-03-12|5-LOW|Clerk#000000186|0|jole blithely above the quickly regular packages. carefully regular pinto bean|61.1125|-149.861|61.1125|-149.861|
-2913|43|O|130702.19|1997-07-12|3-MEDIUM|Clerk#000000118|0|mptotes doubt furiously slyly regu|61.1419|-149.896|61.1419|-149.896|
-2914|109|F|60867.14|1993-03-03|3-MEDIUM|Clerk#000000543|0|he slyly regular theodolites are furiously sile|61.145|-149.878|61.145|-149.878|
-2915|94|F|96015.13|1994-03-31|5-LOW|Clerk#000000410|0|ld packages. bold deposits boost blithely. ironic, unusual theodoli|61.1044|-149.865|61.1044|-149.865|
-2916|8|O|20182.22|1995-12-27|2-HIGH|Clerk#000000681|0|ithely blithe deposits sleep beyond the|61.1444|-149.876|61.1444|-149.876|
-2917|91|O|100714.13|1997-12-09|4-NOT SPECIFIED|Clerk#000000061|0| special dugouts among the special deposi|61.1|-149.85|61.1|-149.85|
-2918|118|O|21760.09|1996-09-08|3-MEDIUM|Clerk#000000439|0|ular deposits across th|61.1105|-149.861|61.1105|-149.861|
-2919|53|F|137223.14|1993-12-10|2-HIGH|Clerk#000000209|0|es. pearls wake quietly slyly ironic instructions--|61.1286|-149.957|61.1286|-149.957|
-2944|14|O|146581.14|1997-09-24|4-NOT SPECIFIED|Clerk#000000740|0|deas. permanently special foxes haggle carefully ab|61.1201|-149.89|61.1201|-149.89|
-2945|29|O|223507.72|1996-01-03|2-HIGH|Clerk#000000499|0|ons are carefully toward the permanent, bold pinto beans. regu|61.112|-149.871|61.112|-149.871|
-2946|125|O|102226.59|1996-02-05|5-LOW|Clerk#000000329|0|g instructions about the regular accounts sleep carefully along the pen|61.1427|-149.864|61.1427|-149.864|
-2947|70|P|43360.95|1995-04-26|1-URGENT|Clerk#000000464|0|ronic accounts. accounts run furiously d|61.1212|-149.947|61.1212|-149.947|
-2948|44|F|100758.71|1994-08-23|5-LOW|Clerk#000000701|0| deposits according to the blithely pending |61.1228|-149.939|61.1228|-149.939|
-2949|137|F|94231.71|1994-04-12|2-HIGH|Clerk#000000184|0|y ironic accounts use. quickly blithe accou|61.1093|-149.871|61.1093|-149.871|
-2950|136|O|183620.33|1997-07-06|1-URGENT|Clerk#000000833|0| dolphins around the furiously |61.145|-149.878|61.145|-149.878|
-2951|74|O|125509.17|1996-02-06|2-HIGH|Clerk#000000680|0|gular deposits above the finally regular ideas integrate idly stealthil|61.1191|-149.871|61.1191|-149.871|
-2976|29|F|145768.47|1993-12-10|4-NOT SPECIFIED|Clerk#000000159|0|. furiously ironic asymptotes haggle ruthlessly silently regular r|61.1003|-149.856|61.1003|-149.856|
-2977|73|O|25170.88|1996-08-27|3-MEDIUM|Clerk#000000252|0|quickly special platelets are furio|61.1113|-149.872|61.1113|-149.872|
-2978|44|P|139542.14|1995-05-03|1-URGENT|Clerk#000000135|0|d. even platelets are. ironic dependencies cajole slow, e|61.1084|-149.861|61.1084|-149.861|
-2979|133|O|116789.98|1996-03-23|3-MEDIUM|Clerk#000000820|0|even, ironic foxes sleep along|61.144|-149.878|61.144|-149.878|
-2980|4|O|187514.11|1996-09-14|3-MEDIUM|Clerk#000000661|0|y quick pinto beans wake. slyly re|61.1426|-149.877|61.1426|-149.877|
-2981|49|O|37776.79|1998-07-29|5-LOW|Clerk#000000299|0|hely among the express foxes. blithely stealthy requests cajole boldly. regu|61.1173|-149.861|61.1173|-149.861|
-2982|85|F|55582.94|1995-03-19|2-HIGH|Clerk#000000402|0|lyly. express theodolites affix slyly after the slyly speci|61.1347|-149.914|61.1347|-149.914|
+2213|122|F|146136.10|1993-01-15|4-NOT SPECIFIED|Clerk#000000598|0|osits are carefully reg|61.1101|-149.857|61.1101|-149.857|
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/data/orders-with-locations-part2.txt b/hyracks/hyracks-examples/hyracks-integration-tests/data/orders-with-locations-part2.txt
new file mode 100644
index 0000000..afbd373
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/data/orders-with-locations-part2.txt
@@ -0,0 +1,193 @@
+2214|115|O|150345.63|1998-05-05|3-MEDIUM|Clerk#000000253|0|packages. fluffily even accounts haggle blithely. carefully ironic depen|61.1101|-149.857|61.1101|-149.857|
+2215|40|O|108239.46|1996-06-16|4-NOT SPECIFIED|Clerk#000000817|0|le final, final foxes. quickly regular gifts are carefully deposit|61.1101|-149.857|61.1101|-149.857|
+2240|56|F|174090.30|1992-03-06|4-NOT SPECIFIED|Clerk#000000622|0|accounts against the slyly express foxes are after the slyly regular |61.1101|-149.857|61.1101|-149.857|
+2241|103|F|165219.08|1993-05-11|1-URGENT|Clerk#000000081|0|y about the silent excuses. furiously ironic instructions along the sil|61.1101|-149.857|61.1101|-149.857|
+2242|82|O|15082.82|1997-07-20|4-NOT SPECIFIED|Clerk#000000360|0| pending multipliers. carefully express asymptotes use quickl|61.1101|-149.857|61.1101|-149.857|
+2243|49|O|10451.97|1995-06-10|2-HIGH|Clerk#000000813|0|ously regular deposits integrate s|61.1101|-149.857|61.1101|-149.857|
+2244|127|F|21207.08|1993-01-09|1-URGENT|Clerk#000001000|0|ckages. ironic, ironic accounts haggle blithely express excuses. |61.1101|-149.857|61.1101|-149.857|
+2245|58|F|150585.73|1993-04-28|3-MEDIUM|Clerk#000000528|0|ake carefully. braids haggle slyly quickly b|61.1101|-149.857|61.1101|-149.857|
+2246|113|O|85755.84|1996-05-27|4-NOT SPECIFIED|Clerk#000000739|0| final gifts sleep |61.1101|-149.857|61.1101|-149.857|
+2247|95|F|13491.31|1992-08-02|4-NOT SPECIFIED|Clerk#000000947|0|furiously regular packages. final brai|61.1101|-149.857|61.1101|-149.857|
+2272|139|F|127934.71|1993-04-13|2-HIGH|Clerk#000000449|0|s. bold, ironic pinto beans wake. silently specia|61.1101|-149.857|61.1101|-149.857|
+2273|136|O|142291.79|1996-12-14|5-LOW|Clerk#000000155|0|uickly express foxes haggle quickly against|61.1101|-149.857|61.1101|-149.857|
+2274|104|F|58273.89|1993-09-04|4-NOT SPECIFIED|Clerk#000000258|0|nstructions try to hag|61.1101|-149.857|61.1101|-149.857|
+2275|149|F|37398.90|1992-10-22|4-NOT SPECIFIED|Clerk#000000206|0| furiously furious platelets. slyly final packa|61.1101|-149.857|61.1101|-149.857|
+2276|43|O|141159.63|1996-04-29|4-NOT SPECIFIED|Clerk#000000821|0|ecial requests. fox|61.1101|-149.857|61.1101|-149.857|
+2277|89|F|79270.23|1995-01-02|4-NOT SPECIFIED|Clerk#000000385|0|accounts cajole. even i|61.1101|-149.857|61.1101|-149.857|
+2278|142|O|101878.46|1998-04-25|3-MEDIUM|Clerk#000000186|0|r pinto beans integrate after the carefully even deposits. blit|61.1101|-149.857|61.1101|-149.857|
+2279|80|F|142322.33|1993-02-23|3-MEDIUM|Clerk#000000898|0|de of the quickly unusual instructio|61.2141|-149.864|61.2141|-149.864|
+2304|46|F|93769.28|1994-01-07|4-NOT SPECIFIED|Clerk#000000415|0|onic platelets. ironic packages haggle. packages nag doggedly according to|61.2171|-149.9|61.2171|-149.9|
+2305|43|F|122964.66|1993-01-26|2-HIGH|Clerk#000000440|0|ove the furiously even acco|61.2171|-149.9|61.2171|-149.9|
+2306|28|O|244704.23|1995-07-26|2-HIGH|Clerk#000000975|0| wake furiously requests. permanent requests affix. final packages caj|61.2171|-149.9|61.2171|-149.9|
+2307|106|F|59417.76|1993-06-29|5-LOW|Clerk#000000952|0|furiously even asymptotes? carefully regular accounts|61.2171|-149.9|61.2171|-149.9|
+2308|25|F|58546.02|1992-10-25|4-NOT SPECIFIED|Clerk#000000609|0|ts. slyly final depo|61.2171|-149.9|61.2171|-149.9|
+2309|100|O|146933.07|1995-09-04|5-LOW|Clerk#000000803|0|he carefully pending packages. fluffily stealthy foxes engage carefully|61.2171|-149.9|61.2171|-149.9|
+2310|31|O|82928.12|1996-09-20|5-LOW|Clerk#000000917|0|wake carefully. unusual instructions nag ironic, regular excuse|61.2171|-149.9|61.2171|-149.9|
+2311|73|P|153233.93|1995-05-02|2-HIGH|Clerk#000000761|0|ly pending asymptotes-- furiously bold excus|61.2171|-149.9|61.2171|-149.9|
+2336|142|O|22294.51|1996-01-07|4-NOT SPECIFIED|Clerk#000000902|0|c, final excuses sleep furiously among the even theodolites. f|61.2171|-149.9|61.2171|-149.9|
+2337|142|O|45704.96|1997-06-18|4-NOT SPECIFIED|Clerk#000000754|0| quickly. final accounts haggle. carefully final acco|61.2171|-149.9|61.2171|-149.9|
+2338|140|O|28155.92|1997-09-15|2-HIGH|Clerk#000000951|0|riously final dugouts. final, ironic packages wake express, ironic id|61.2171|-149.9|61.2171|-149.9|
+2339|109|F|63470.78|1993-12-15|5-LOW|Clerk#000000847|0| against the regular |61.2171|-149.9|61.2171|-149.9|
+2340|65|O|30778.78|1996-01-12|1-URGENT|Clerk#000000964|0|ter the deposits sleep according to the slyly regular packages. carefully |61.2171|-149.9|61.2171|-149.9|
+2341|82|F|55950.21|1993-05-30|5-LOW|Clerk#000000443|0|sts-- blithely bold dolphins through the deposits nag blithely carefully re|61.2171|-149.9|61.2171|-149.9|
+2342|37|O|104038.78|1996-06-09|1-URGENT|Clerk#000000615|0|oost carefully across the regular accounts. blithely final d|61.2171|-149.9|61.2171|-149.9|
+2343|73|O|85381.00|1995-08-21|3-MEDIUM|Clerk#000000170|0|fluffily over the slyly special deposits. quickl|64.8487|-147.704|64.8487|-147.704|
+2368|13|F|101240.96|1993-08-20|1-URGENT|Clerk#000000830|0|t the bold instructions. carefully unusual |64.8486|-147.705|64.8486|-147.705|
+2369|110|O|73517.91|1996-12-24|2-HIGH|Clerk#000000752|0|iously even requests are dogged, express |64.8087|-147.71|64.8087|-147.71|
+2370|142|F|73924.21|1994-01-17|1-URGENT|Clerk#000000231|0|lyly final packages. quickly final deposits haggl|64.8363|-147.758|64.8363|-147.758|
+2371|19|O|193857.67|1998-01-07|1-URGENT|Clerk#000000028|0|ckages haggle at th|64.8476|-147.704|64.8476|-147.704|
+2372|31|O|104927.66|1997-11-21|5-LOW|Clerk#000000342|0|s: deposits haggle along the final ideas. careful|64.8302|-147.744|64.8302|-147.744|
+2373|28|F|55211.04|1994-03-12|4-NOT SPECIFIED|Clerk#000000306|0| even, special courts grow quickly. pending,|64.8476|-147.812|64.8476|-147.812|
+2374|4|F|115219.88|1993-10-29|4-NOT SPECIFIED|Clerk#000000081|0| blithely regular packages. blithely unusua|64.8144|-147.756|64.8144|-147.756|
+2375|5|O|106612.48|1996-11-20|3-MEDIUM|Clerk#000000197|0|unusual, pending theodolites cajole carefully |64.8183|-147.778|64.8183|-147.778|
+2400|37|O|92798.66|1998-07-25|5-LOW|Clerk#000000782|0|nusual courts nag against the carefully unusual pinto b|64.8494|-147.818|64.8494|-147.818|
+2401|148|O|88448.24|1997-07-29|4-NOT SPECIFIED|Clerk#000000531|0|ully unusual instructions boost carefully silently regular requests. |64.849|-147.822|64.849|-147.822|
+2402|67|O|70403.62|1996-09-06|4-NOT SPECIFIED|Clerk#000000162|0|slyly final sheaves sleep slyly. q|64.8367|-147.716|64.8367|-147.716|
+2403|55|O|111020.79|1998-04-11|3-MEDIUM|Clerk#000000820|0|furiously regular deposits use. furiously unusual accounts wake along the |64.8127|-147.772|64.8127|-147.772|
+2404|77|O|109077.69|1997-03-13|4-NOT SPECIFIED|Clerk#000000409|0|deposits breach furiously. ironic foxes haggle carefully bold packag|64.8143|-147.751|64.8143|-147.751|
+2405|73|O|115929.14|1996-12-23|3-MEDIUM|Clerk#000000535|0|ular, regular asympto|64.842|-147.721|64.842|-147.721|
+2406|7|O|182516.77|1996-10-28|5-LOW|Clerk#000000561|0|blithely regular accounts u|64.8403|-147.714|64.8403|-147.714|
+2407|55|O|112843.52|1998-06-19|2-HIGH|Clerk#000000068|0|uests affix slyly among the slyly regular depos|64.8371|-147.881|64.8371|-147.881|
+2432|103|O|62661.93|1996-07-13|1-URGENT|Clerk#000000115|0|re. slyly even deposits wake bra|64.8151|-147.707|64.8151|-147.707|
+2433|31|F|147071.86|1994-08-22|4-NOT SPECIFIED|Clerk#000000324|0|ess patterns are slyly. packages haggle carefu|64.8151|-147.707|64.8151|-147.707|
+2434|25|O|123956.25|1997-04-27|3-MEDIUM|Clerk#000000190|0|s. quickly ironic dolphins impress final deposits. blithel|64.8541|-147.81|64.8541|-147.81|
+2435|73|F|122490.66|1993-02-21|5-LOW|Clerk#000000112|0|es are carefully along the carefully final instructions. pe|64.8878|-147.496|64.8878|-147.496|
+2436|125|O|73990.08|1995-09-11|4-NOT SPECIFIED|Clerk#000000549|0|arefully. blithely bold deposits affix special accounts. final foxes nag. spe|64.8299|-147.728|64.8299|-147.728|
+2437|85|F|143411.69|1993-04-21|4-NOT SPECIFIED|Clerk#000000578|0|. theodolites wake slyly-- ironic, pending platelets above the carefully exp|64.8132|-147.762|64.8132|-147.762|
+2438|13|F|214494.39|1993-07-15|2-HIGH|Clerk#000000744|0|the final, regular warhorses. regularly |64.8372|-147.713|64.8372|-147.713|
+2439|55|O|41811.12|1997-03-15|2-HIGH|Clerk#000000819|0|lithely after the car|64.7927|-148.036|64.7927|-148.036|
+2464|145|O|30495.65|1997-11-23|5-LOW|Clerk#000000633|0|le about the instructions. courts wake carefully even|64.8717|-147.819|64.8717|-147.819|
+2465|34|O|180737.75|1995-06-24|1-URGENT|Clerk#000000078|0|al pinto beans. final, bold packages wake quickly|64.8527|-147.686|64.8527|-147.686|
+2466|19|F|161625.50|1994-03-06|1-URGENT|Clerk#000000424|0|c pinto beans. express deposits wake quickly. even, final courts nag. package|64.8371|-147.811|64.8371|-147.811|
+2467|35|O|7231.91|1995-07-16|4-NOT SPECIFIED|Clerk#000000914|0|pades sleep furiously. sometimes regular packages again|64.846|-147.705|64.846|-147.705|
+2468|112|O|160627.01|1997-06-09|4-NOT SPECIFIED|Clerk#000000260|0|ickly regular packages. slyly ruthless requests snooze quickly blithe|64.9064|-147.726|64.9064|-147.726|
+2469|124|O|192074.23|1996-11-26|5-LOW|Clerk#000000730|0| sleep closely regular instructions. furiously ironic instructi|64.9347|-147.56|64.9347|-147.56|
+2470|58|O|104966.33|1997-04-19|3-MEDIUM|Clerk#000000452|0|to the furiously final packages? pa|64.8861|-147.677|64.8861|-147.677|
+2471|89|O|34936.31|1998-03-12|4-NOT SPECIFIED|Clerk#000000860|0|carefully blithely regular pac|64.8302|-147.744|64.8302|-147.744|
+2496|136|F|140390.60|1994-01-09|2-HIGH|Clerk#000000142|0|slyly. pending instructions sleep. quic|60.6673|-151.311|60.6673|-151.311|
+2497|47|F|171326.48|1992-08-27|1-URGENT|Clerk#000000977|0|ily ironic pinto beans. furiously final platelets alongside of t|60.6997|-151.38|60.6997|-151.38|
+2498|97|F|45514.27|1993-11-08|5-LOW|Clerk#000000373|0|g the slyly special pinto beans. |60.5658|-151.244|60.5658|-151.244|
+2499|121|O|147243.86|1995-09-24|1-URGENT|Clerk#000000277|0|r the quickly bold foxes. bold instructi|60.6331|-151.163|60.6331|-151.163|
+2500|133|F|131122.82|1992-08-15|2-HIGH|Clerk#000000447|0|integrate slyly pending deposits. furiously ironic accounts across the s|60.6331|-151.163|60.6331|-151.163|
+2501|67|O|79380.51|1997-05-25|5-LOW|Clerk#000000144|0|ickly special theodolite|60.6331|-151.163|60.6331|-151.163|
+2502|70|F|33470.40|1993-05-28|4-NOT SPECIFIED|Clerk#000000914|0|lyly: carefully pending ideas affix again|60.6201|-151.332|60.6201|-151.332|
+2503|7|F|183671.08|1993-06-20|3-MEDIUM|Clerk#000000294|0|ly even packages was. ironic, regular deposits unwind furiously across the p|60.5004|-151.276|60.5004|-151.276|
+2528|55|F|92069.62|1994-11-20|1-URGENT|Clerk#000000789|0|ular dependencies? regular frays kindle according to the blith|60.6331|-151.163|60.6331|-151.163|
+2529|136|O|4104.30|1996-08-20|2-HIGH|Clerk#000000511|0|posits across the silent instructions wake blithely across |60.6331|-151.163|60.6331|-151.163|
+2530|128|F|58853.11|1994-03-21|3-MEDIUM|Clerk#000000291|0|ular instructions about the quic|60.6901|-151.321|60.6901|-151.321|
+2531|44|O|143212.85|1996-05-06|4-NOT SPECIFIED|Clerk#000000095|0|even accounts. furiously ironic excuses sleep fluffily. carefully silen|60.6676|-151.29|60.6676|-151.29|
+2532|94|O|116093.49|1995-10-11|2-HIGH|Clerk#000000498|0|the blithely pending accounts. regular, regular excuses boost aro|60.6331|-151.163|60.6331|-151.163|
+2533|50|O|168495.03|1997-03-24|1-URGENT|Clerk#000000594|0|ecial instructions. spec|60.5632|-151.266|60.5632|-151.266|
+2534|76|O|202784.54|1996-07-17|3-MEDIUM|Clerk#000000332|0|packages cajole ironic requests. furiously regular|60.6331|-151.163|60.6331|-151.163|
+2535|121|F|67018.30|1993-05-25|5-LOW|Clerk#000000296|0|phins cajole beneath the fluffily express asymptotes. c|60.6331|-151.163|60.6331|-151.163|
+2560|131|F|153426.79|1992-09-05|1-URGENT|Clerk#000000538|0|atelets; quickly sly requests|60.6509|-151.342|60.6509|-151.342|
+2561|58|O|137473.58|1997-11-14|1-URGENT|Clerk#000000861|0|ual requests. unusual deposits cajole furiously pending, regular platelets. |60.5601|-151.107|60.5601|-151.107|
+2562|10|F|136360.37|1992-08-01|1-URGENT|Clerk#000000467|0|elets. pending dolphins promise slyly. bo|60.5123|-151.275|60.5123|-151.275|
+2563|62|F|168952.10|1993-11-19|4-NOT SPECIFIED|Clerk#000000150|0|sly even packages after the furio|60.6076|-151.325|60.6076|-151.325|
+2564|77|F|3967.47|1994-09-09|2-HIGH|Clerk#000000718|0|usly regular pinto beans. orbits wake carefully. slyly e|60.6331|-151.163|60.6331|-151.163|
+2565|56|O|204438.57|1998-02-28|3-MEDIUM|Clerk#000000032|0|x-ray blithely along|60.5175|-151.235|60.5175|-151.235|
+2566|86|F|89992.48|1992-10-10|3-MEDIUM|Clerk#000000414|0|ructions boost bold ideas. idly ironic accounts use according to th|60.5535|-151.108|60.5535|-151.108|
+2567|70|O|263411.29|1998-02-27|2-HIGH|Clerk#000000031|0|detect. furiously ironic requests|60.5614|-151.275|60.5614|-151.275|
+2592|101|F|8225.96|1993-03-05|4-NOT SPECIFIED|Clerk#000000524|0|ts nag fluffily. quickly stealthy theodolite|60.5647|-151.195|60.5647|-151.195|
+2593|92|F|134726.09|1993-09-04|2-HIGH|Clerk#000000468|0|r the carefully final|60.6331|-151.163|60.6331|-151.163|
+2594|79|F|94866.39|1992-12-17|1-URGENT|Clerk#000000550|0|ests. theodolites above the blithely even accounts detect furio|60.6331|-151.163|60.6331|-151.163|
+2595|74|O|173130.20|1995-12-14|4-NOT SPECIFIED|Clerk#000000222|0|arefully ironic requests nag carefully ideas. |60.6331|-151.163|60.6331|-151.163|
+2596|43|O|74940.13|1996-08-17|1-URGENT|Clerk#000000242|0|requests. ironic, bold theodolites wak|60.6331|-151.163|60.6331|-151.163|
+2597|104|F|21964.66|1993-02-04|2-HIGH|Clerk#000000757|0|iously ruthless exc|60.6331|-151.163|60.6331|-151.163|
+2598|112|O|84871.50|1996-03-05|3-MEDIUM|Clerk#000000391|0| ironic notornis according to the blithely final requests should |60.6678|-151.31|60.6678|-151.31|
+2599|149|O|62807.13|1996-11-07|2-HIGH|Clerk#000000722|0|ts. slyly regular theodolites wake sil|60.5003|-151.276|60.5003|-151.276|
+2624|52|O|27148.63|1996-11-28|5-LOW|Clerk#000000930|0|ic, regular packages|60.6331|-151.163|60.6331|-151.163|
+2625|40|F|39382.74|1992-10-14|4-NOT SPECIFIED|Clerk#000000386|0| final deposits. blithely ironic ideas |61.5855|-149.326|61.5855|-149.326|
+2626|139|O|84314.51|1995-09-08|4-NOT SPECIFIED|Clerk#000000289|0|gside of the carefully special packages are furiously after the slyly express |61.5979|-149.437|61.5979|-149.437|
+2627|149|F|26798.65|1992-03-24|3-MEDIUM|Clerk#000000181|0|s. silent, ruthless requests|61.6141|-149.457|61.6141|-149.457|
+2628|56|F|165655.99|1993-10-22|5-LOW|Clerk#000000836|0|ajole across the blithely careful accounts. blithely silent deposits sl|61.5799|-149.461|61.5799|-149.461|
+2629|139|O|96458.03|1998-04-06|5-LOW|Clerk#000000680|0|uches dazzle carefully even, express excuses. ac|61.5845|-149.337|61.5845|-149.337|
+2630|85|F|127132.51|1992-10-24|5-LOW|Clerk#000000712|0|inal theodolites. ironic instructions s|61.5351|-149.558|61.5351|-149.558|
+2631|37|F|63103.32|1993-09-24|5-LOW|Clerk#000000833|0| quickly unusual deposits doubt around |61.5811|-149.45|61.5811|-149.45|
+2656|77|F|105492.37|1993-05-04|1-URGENT|Clerk#000000307|0|elets. slyly final accou|61.5793|-149.442|61.5793|-149.442|
+2657|25|O|148176.06|1995-10-17|2-HIGH|Clerk#000000160|0| foxes-- slyly final dependencies around the slyly final theodo|61.5661|-149.313|61.5661|-149.313|
+2658|14|O|163834.46|1995-09-23|3-MEDIUM|Clerk#000000400|0|bout the slyly regular accounts. ironic, |61.6141|-149.457|61.6141|-149.457|
+2659|83|F|79785.52|1993-12-18|4-NOT SPECIFIED|Clerk#000000758|0|cross the pending requests maintain |61.5786|-149.332|61.5786|-149.332|
+2660|127|O|16922.51|1995-08-05|5-LOW|Clerk#000000480|0|ly finally regular deposits. ironic theodolites cajole|61.5811|-149.45|61.5811|-149.45|
+2661|74|O|106036.84|1997-01-04|3-MEDIUM|Clerk#000000217|0|al, regular pinto beans. silently final deposits should have t|61.5825|-149.429|61.5825|-149.429|
+2662|37|O|87689.88|1996-08-21|3-MEDIUM|Clerk#000000589|0|bold pinto beans above the slyly final accounts affix furiously deposits. pac|61.6141|-149.457|61.6141|-149.457|
+2663|95|O|35131.80|1995-09-06|1-URGENT|Clerk#000000950|0|ar requests. furiously final dolphins along the fluffily spe|61.5531|-149.651|61.5531|-149.651|
+2688|98|F|181077.36|1992-01-24|2-HIGH|Clerk#000000720|0|have to nag according to the pending theodolites. sly|61.5531|-149.651|61.5531|-149.651|
+2689|103|F|41552.78|1992-04-09|4-NOT SPECIFIED|Clerk#000000698|0|press pains wake. furiously express theodolites alongsid|61.5698|-149.62|61.5698|-149.62|
+2690|94|O|224674.27|1996-03-31|3-MEDIUM|Clerk#000000760|0|ravely even theodolites |61.6141|-149.457|61.6141|-149.457|
+2691|7|F|30137.17|1992-04-30|5-LOW|Clerk#000000439|0|es at the regular deposits sleep slyly by the fluffy requests. eve|61.5474|-149.458|61.5474|-149.458|
+2692|62|O|24265.24|1997-12-02|3-MEDIUM|Clerk#000000878|0|es. regular asymptotes cajole above t|61.5825|-149.429|61.5825|-149.429|
+2693|19|O|66158.13|1996-09-04|1-URGENT|Clerk#000000370|0|ndle never. blithely regular packages nag carefully enticing platelets. ca|61.5955|-149.423|61.5955|-149.423|
+2694|121|O|102807.59|1996-03-14|5-LOW|Clerk#000000722|0| requests. bold deposits above the theodol|61.5801|-149.461|61.5801|-149.461|
+2695|58|O|138584.20|1996-08-20|1-URGENT|Clerk#000000697|0|ven deposits around the quickly regular packa|61.5785|-149.415|61.5785|-149.415|
+2720|31|F|161307.05|1993-06-08|1-URGENT|Clerk#000000948|0|quickly. special asymptotes are fluffily ironi|61.6402|-149.34|61.6402|-149.34|
+2721|79|O|59180.25|1996-01-27|2-HIGH|Clerk#000000401|0| ideas eat even, unusual ideas. theodolites are carefully|61.583|-149.457|61.583|-149.457|
+2722|35|F|50328.84|1994-04-09|5-LOW|Clerk#000000638|0|rding to the carefully quick deposits. bli|61.5907|-149.295|61.5907|-149.295|
+2723|61|O|104759.25|1995-10-06|5-LOW|Clerk#000000836|0|nts must have to cajo|61.6141|-149.457|61.6141|-149.457|
+2724|137|F|116069.66|1994-09-14|2-HIGH|Clerk#000000217|0| sleep blithely. blithely idle |61.5933|-149.397|61.5933|-149.397|
+2725|89|F|75144.68|1994-05-21|4-NOT SPECIFIED|Clerk#000000835|0|ular deposits. spec|61.6091|-149.77|61.6091|-149.77|
+2726|7|F|47753.00|1992-11-27|5-LOW|Clerk#000000470|0| blithely even dinos sleep care|61.577|-149.411|61.577|-149.411|
+2727|74|O|3089.42|1998-04-19|4-NOT SPECIFIED|Clerk#000000879|0|sual theodolites cajole enticingly above the furiously fin|61.6078|-149.322|61.6078|-149.322|
+2752|59|F|187932.30|1993-11-19|2-HIGH|Clerk#000000648|0| carefully regular foxes are quickly quickl|61.6131|-149.397|61.6131|-149.397|
+2753|16|F|159720.39|1993-11-30|2-HIGH|Clerk#000000380|0|ending instructions. unusual deposits|61.6648|-149.372|61.6648|-149.372|
+2754|145|F|25985.52|1994-04-03|2-HIGH|Clerk#000000960|0|cies detect slyly. |61.5531|-149.651|61.5531|-149.651|
+2755|118|F|101202.18|1992-02-07|4-NOT SPECIFIED|Clerk#000000177|0|ously according to the sly foxes. blithely regular pinto bean|61.5811|-149.45|61.5811|-149.45|
+2756|118|F|142323.38|1994-04-18|1-URGENT|Clerk#000000537|0|arefully special warho|61.583|-149.457|61.583|-149.457|
+2757|76|O|89792.48|1995-07-20|2-HIGH|Clerk#000000216|0| regular requests subl|61.1955|-149.9|61.1955|-149.9|
+2758|43|O|36671.88|1998-07-12|5-LOW|Clerk#000000863|0|s cajole according to the carefully special |61.1844|-149.897|61.1844|-149.897|
+2759|116|F|89731.10|1993-11-25|4-NOT SPECIFIED|Clerk#000000071|0|ts. regular, pending pinto beans sleep ab|61.1901|-149.892|61.1901|-149.892|
+2784|95|O|106635.21|1998-01-07|1-URGENT|Clerk#000000540|0|g deposits alongside of the silent requests s|61.1444|-149.867|61.1444|-149.867|
+2785|148|O|132854.79|1995-07-21|2-HIGH|Clerk#000000098|0|iously pending packages sleep according to the blithely unusual foxe|61.1955|-149.9|61.1955|-149.9|
+2786|79|F|178254.66|1992-03-22|2-HIGH|Clerk#000000976|0|al platelets cajole blithely ironic requests. ironic re|61.1893|-149.887|61.1893|-149.887|
+2787|103|O|3726.14|1995-09-30|1-URGENT|Clerk#000000906|0|he ironic, regular |61.2174|-149.888|61.2174|-149.888|
+2788|124|F|17172.66|1994-09-22|1-URGENT|Clerk#000000641|0|nts wake across the fluffily bold accoun|61.2227|-149.842|61.2227|-149.842|
+2789|37|O|219123.27|1998-03-14|2-HIGH|Clerk#000000972|0|gular patterns boost. carefully even re|61.1263|-149.872|61.1263|-149.872|
+2790|25|F|177458.97|1994-08-19|2-HIGH|Clerk#000000679|0| the carefully express deposits sleep slyly |61.1138|-149.866|61.1138|-149.866|
+2791|121|F|156697.55|1994-10-10|2-HIGH|Clerk#000000662|0|as. slyly ironic accounts play furiously bl|61.2157|-149.821|61.2157|-149.821|
+2816|58|F|42225.53|1994-09-20|2-HIGH|Clerk#000000289|0|kages at the final deposits cajole furious foxes. quickly |61.2174|-149.888|61.2174|-149.888|
+2817|40|F|71453.85|1994-04-19|3-MEDIUM|Clerk#000000982|0|ic foxes haggle upon the daringly even pinto beans. slyly|61.1855|-149.868|61.1855|-149.868|
+2818|49|F|120086.84|1994-12-12|3-MEDIUM|Clerk#000000413|0|eep furiously special ideas. express |61.1951|-149.873|61.1951|-149.873|
+2819|103|F|66927.16|1994-05-05|1-URGENT|Clerk#000000769|0|ngside of the blithely ironic dolphins. furio|61.1444|-149.867|61.1444|-149.867|
+2820|19|F|143813.39|1994-05-20|3-MEDIUM|Clerk#000000807|0|equests are furiously. carefu|61.1883|-149.735|61.1883|-149.735|
+2821|118|F|36592.48|1993-08-09|3-MEDIUM|Clerk#000000323|0|ng requests. even instructions are quickly express, silent instructi|61.2161|-149.876|61.2161|-149.876|
+2822|79|F|40142.15|1993-07-26|2-HIGH|Clerk#000000510|0|furiously against the accounts. unusual accounts aft|61.2161|-149.876|61.2161|-149.876|
+2823|79|O|171894.45|1995-09-09|2-HIGH|Clerk#000000567|0|encies. carefully fluffy accounts m|61.1893|-149.888|61.1893|-149.888|
+2848|70|F|116258.53|1992-03-10|1-URGENT|Clerk#000000256|0|ly fluffy foxes sleep furiously across the slyly regu|61.2174|-149.888|61.2174|-149.888|
+2849|46|O|180054.29|1996-04-30|2-HIGH|Clerk#000000659|0|al packages are after the quickly bold requests. carefully special |61.1914|-149.886|61.1914|-149.886|
+2850|100|O|122969.79|1996-10-02|2-HIGH|Clerk#000000392|0|, regular deposits. furiously pending packages hinder carefully carefully u|61.1541|-149.958|61.1541|-149.958|
+2851|145|O|7859.36|1997-09-07|5-LOW|Clerk#000000566|0|Tiresias wake quickly quickly even|61.1259|-149.717|61.1259|-149.717|
+2852|91|F|99050.81|1993-01-16|1-URGENT|Clerk#000000740|0|ruthless deposits against the final instructions use quickly al|61.2193|-149.902|61.2193|-149.902|
+2853|94|F|103641.15|1994-05-05|2-HIGH|Clerk#000000878|0|the carefully even packages.|61.1879|-149.886|61.1879|-149.886|
+2854|139|F|153568.02|1994-06-27|1-URGENT|Clerk#000000010|0| furiously ironic tithes use furiously |61.1372|-149.912|61.1372|-149.912|
+2855|49|F|48419.58|1993-04-04|4-NOT SPECIFIED|Clerk#000000973|0| silent, regular packages sleep |61.1101|-149.857|61.1101|-149.857|
+2880|8|F|145761.99|1992-03-15|2-HIGH|Clerk#000000756|0|ves maintain doggedly spec|61.1791|-149.94|61.1791|-149.94|
+2881|100|F|45695.84|1992-05-10|5-LOW|Clerk#000000864|0|uriously. slyly express requests according to the silent dol|61.2031|-149.749|61.2031|-149.749|
+2882|121|O|172872.37|1995-08-22|2-HIGH|Clerk#000000891|0|pending deposits. carefully eve|61.1914|-149.877|61.1914|-149.877|
+2883|121|F|170360.27|1995-01-23|5-LOW|Clerk#000000180|0|uses. carefully ironic accounts lose fluffil|61.1944|-149.883|61.1944|-149.883|
+2884|92|O|71683.84|1997-10-12|3-MEDIUM|Clerk#000000780|0|efully express instructions sleep against|61.1923|-149.886|61.1923|-149.886|
+2885|7|F|146896.72|1992-09-19|4-NOT SPECIFIED|Clerk#000000280|0|ly sometimes special excuses. final requests are |61.2123|-149.854|61.2123|-149.854|
+2886|109|F|94527.23|1994-11-13|4-NOT SPECIFIED|Clerk#000000619|0|uctions. ironic packages sle|61.2161|-149.876|61.2161|-149.876|
+2887|109|O|28571.39|1997-05-26|5-LOW|Clerk#000000566|0|slyly even pinto beans. slyly bold epitaphs cajole blithely above t|61.2171|-149.9|61.2171|-149.9|
+2912|94|F|27727.52|1992-03-12|5-LOW|Clerk#000000186|0|jole blithely above the quickly regular packages. carefully regular pinto bean|61.1125|-149.861|61.1125|-149.861|
+2913|43|O|130702.19|1997-07-12|3-MEDIUM|Clerk#000000118|0|mptotes doubt furiously slyly regu|61.1419|-149.896|61.1419|-149.896|
+2914|109|F|60867.14|1993-03-03|3-MEDIUM|Clerk#000000543|0|he slyly regular theodolites are furiously sile|61.145|-149.878|61.145|-149.878|
+2915|94|F|96015.13|1994-03-31|5-LOW|Clerk#000000410|0|ld packages. bold deposits boost blithely. ironic, unusual theodoli|61.1044|-149.865|61.1044|-149.865|
+2916|8|O|20182.22|1995-12-27|2-HIGH|Clerk#000000681|0|ithely blithe deposits sleep beyond the|61.1444|-149.876|61.1444|-149.876|
+2917|91|O|100714.13|1997-12-09|4-NOT SPECIFIED|Clerk#000000061|0| special dugouts among the special deposi|61.1|-149.85|61.1|-149.85|
+2918|118|O|21760.09|1996-09-08|3-MEDIUM|Clerk#000000439|0|ular deposits across th|61.1105|-149.861|61.1105|-149.861|
+2919|53|F|137223.14|1993-12-10|2-HIGH|Clerk#000000209|0|es. pearls wake quietly slyly ironic instructions--|61.1286|-149.957|61.1286|-149.957|
+2944|14|O|146581.14|1997-09-24|4-NOT SPECIFIED|Clerk#000000740|0|deas. permanently special foxes haggle carefully ab|61.1201|-149.89|61.1201|-149.89|
+2945|29|O|223507.72|1996-01-03|2-HIGH|Clerk#000000499|0|ons are carefully toward the permanent, bold pinto beans. regu|61.112|-149.871|61.112|-149.871|
+2946|125|O|102226.59|1996-02-05|5-LOW|Clerk#000000329|0|g instructions about the regular accounts sleep carefully along the pen|61.1427|-149.864|61.1427|-149.864|
+2947|70|P|43360.95|1995-04-26|1-URGENT|Clerk#000000464|0|ronic accounts. accounts run furiously d|61.1212|-149.947|61.1212|-149.947|
+2948|44|F|100758.71|1994-08-23|5-LOW|Clerk#000000701|0| deposits according to the blithely pending |61.1228|-149.939|61.1228|-149.939|
+2949|137|F|94231.71|1994-04-12|2-HIGH|Clerk#000000184|0|y ironic accounts use. quickly blithe accou|61.1093|-149.871|61.1093|-149.871|
+2950|136|O|183620.33|1997-07-06|1-URGENT|Clerk#000000833|0| dolphins around the furiously |61.145|-149.878|61.145|-149.878|
+2951|74|O|125509.17|1996-02-06|2-HIGH|Clerk#000000680|0|gular deposits above the finally regular ideas integrate idly stealthil|61.1191|-149.871|61.1191|-149.871|
+2976|29|F|145768.47|1993-12-10|4-NOT SPECIFIED|Clerk#000000159|0|. furiously ironic asymptotes haggle ruthlessly silently regular r|61.1003|-149.856|61.1003|-149.856|
+2977|73|O|25170.88|1996-08-27|3-MEDIUM|Clerk#000000252|0|quickly special platelets are furio|61.1113|-149.872|61.1113|-149.872|
+2978|44|P|139542.14|1995-05-03|1-URGENT|Clerk#000000135|0|d. even platelets are. ironic dependencies cajole slow, e|61.1084|-149.861|61.1084|-149.861|
+2979|133|O|116789.98|1996-03-23|3-MEDIUM|Clerk#000000820|0|even, ironic foxes sleep along|61.144|-149.878|61.144|-149.878|
+2980|4|O|187514.11|1996-09-14|3-MEDIUM|Clerk#000000661|0|y quick pinto beans wake. slyly re|61.1426|-149.877|61.1426|-149.877|
+2981|49|O|37776.79|1998-07-29|5-LOW|Clerk#000000299|0|hely among the express foxes. blithely stealthy requests cajole boldly. regu|61.1173|-149.861|61.1173|-149.861|
+2982|85|F|55582.94|1995-03-19|2-HIGH|Clerk#000000402|0|lyly. express theodolites affix slyly after the slyly speci|61.1347|-149.914|61.1347|-149.914|
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml b/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml
index f019b44..432877b 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/pom.xml
@@ -1,96 +1,109 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks.examples</groupId>
-  <artifactId>hyracks-integration-tests</artifactId>
-  <name>hyracks-integration-tests</name>
-  <parent>
-    <groupId>edu.uci.ics.hyracks</groupId>
-    <artifactId>hyracks-examples</artifactId>
-    <version>0.2.4-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <version>2.0.2</version>
-        <configuration>
-          <source>1.7</source>
-          <target>1.7</target>
-          <fork>true</fork>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-  	<dependency>
-  		<groupId>junit</groupId>
-  		<artifactId>junit</artifactId>
-  		<version>4.8.1</version>
-  		<type>jar</type>
-  		<scope>test</scope>
-  	</dependency>
-  	<dependency>
-        <groupId>edu.uci.ics.hyracks</groupId>
-        <artifactId>hyracks-client</artifactId>
-        <version>0.2.4-SNAPSHOT</version>
-        <type>jar</type>
-        <scope>compile</scope>
-    </dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-dataflow-std</artifactId>
-  		<version>0.2.4-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-control-cc</artifactId>
-  		<version>0.2.4-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-control-nc</artifactId>
-  		<version>0.2.4-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-am-btree</artifactId>
-  		<version>0.2.4-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-am-invertedindex</artifactId>
-  		<version>0.2.4-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-storage-am-rtree</artifactId>
-  		<version>0.2.4-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>compile</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-test-support</artifactId>
-  		<version>0.2.4-SNAPSHOT</version>
-  		<type>jar</type>
-  		<scope>test</scope>
-  	</dependency>
-  	<dependency>
-  		<groupId>edu.uci.ics.hyracks</groupId>
-  		<artifactId>hyracks-data-std</artifactId>
-  		<version>0.2.4-SNAPSHOT</version>
-  	</dependency>
-  </dependencies>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+	<groupId>edu.uci.ics.hyracks.examples</groupId>
+	<artifactId>hyracks-integration-tests</artifactId>
+	<name>hyracks-integration-tests</name>
+	<parent>
+		<groupId>edu.uci.ics.hyracks</groupId>
+		<artifactId>hyracks-examples</artifactId>
+		<version>0.2.4-SNAPSHOT</version>
+	</parent>
+	<build>
+		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-compiler-plugin</artifactId>
+				<version>2.0.2</version>
+				<configuration>
+					<source>1.7</source>
+					<target>1.7</target>
+				</configuration>
+			</plugin>
+		</plugins>
+	</build>
+	<dependencies>
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>4.8.1</version>
+			<type>jar</type>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-dataflow-std</artifactId>
+			<version>0.2.4-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-control-cc</artifactId>
+			<version>0.2.4-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-control-nc</artifactId>
+			<version>0.2.4-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-storage-am-btree</artifactId>
+			<version>0.2.4-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-storage-am-rtree</artifactId>
+			<version>0.2.4-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-storage-am-lsm-btree</artifactId>
+			<version>0.2.4-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-storage-am-lsm-rtree</artifactId>
+			<version>0.2.4-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-storage-am-lsm-invertedindex</artifactId>
+			<version>0.2.4-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-test-support</artifactId>
+			<version>0.2.4-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-data-std</artifactId>
+			<version>0.2.4-SNAPSHOT</version>
+		</dependency>
+		<dependency>
+			<groupId>edu.uci.ics.hyracks</groupId>
+			<artifactId>hyracks-client</artifactId>
+			<version>0.2.4-SNAPSHOT</version>
+			<type>jar</type>
+			<scope>compile</scope>
+		</dependency>
+	</dependencies>
 </project>
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexInsertOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/AbstractBTreeOperatorTest.java
similarity index 60%
rename from hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexInsertOperatorTest.java
rename to hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/AbstractBTreeOperatorTest.java
index 34a1cd3..0af1193 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexInsertOperatorTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/AbstractBTreeOperatorTest.java
@@ -13,24 +13,20 @@
  * limitations under the License.
  */
 
-package edu.uci.ics.hyracks.tests.btree;
+package edu.uci.ics.hyracks.tests.am.btree;
 
 import java.io.DataOutput;
 import java.io.File;
-import java.text.SimpleDateFormat;
-import java.util.Date;
 
-import org.junit.AfterClass;
+import org.junit.After;
 import org.junit.Before;
-import org.junit.Test;
 
 import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.dataset.ResultSetId;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
 import edu.uci.ics.hyracks.api.io.FileReference;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
@@ -47,71 +43,79 @@
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
 import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.misc.NullSinkOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
 import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexDropOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexInsertUpdateDeleteOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.test.support.TestIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.common.file.TransientLocalResourceFactoryProvider;
+import edu.uci.ics.hyracks.test.support.TestIndexLifecycleManagerProvider;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
+import edu.uci.ics.hyracks.tests.am.common.ITreeIndexOperatorTestHelper;
 import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
-import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
 
-public class BTreeSecondaryIndexInsertOperatorTest extends AbstractIntegrationTest {
+public abstract class AbstractBTreeOperatorTest extends AbstractIntegrationTest {
     static {
         TestStorageManagerComponentHolder.init(8192, 20, 20);
     }
 
-    private IStorageManagerInterface storageManager = new TestStorageManagerInterface();
-    private IIndexRegistryProvider<IIndex> indexRegistryProvider = new TestIndexRegistryProvider();
-    private IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory();
-
-    private final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
-    private final static String sep = System.getProperty("file.separator");
+    protected final IStorageManagerInterface storageManager = new TestStorageManagerInterface();
+    protected final IIndexLifecycleManagerProvider lcManagerProvider = new TestIndexLifecycleManagerProvider();
+    protected IIndexDataflowHelperFactory dataflowHelperFactory;
 
     // field, type and key declarations for primary index
-    private int primaryFieldCount = 6;
-    private ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
-    private int primaryKeyFieldCount = 1;
-    private IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
+    protected final int primaryFieldCount = 6;
+    protected final ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
+    protected final int primaryKeyFieldCount = 1;
+    protected final IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
+    protected final int[] primaryBloomFilterKeyFields = new int[primaryKeyFieldCount];
 
-    private static String primaryBtreeName = "primary" + simpleDateFormat.format(new Date());
-    private static String primaryFileName = System.getProperty("java.io.tmpdir") + sep + primaryBtreeName;
-
-    private IFileSplitProvider primaryBtreeSplitProvider = new ConstantFileSplitProvider(
-            new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(primaryFileName))) });
-
-    private RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+    protected final RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
             UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
             UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
             UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
 
+    // to be set by subclasses
+    protected String primaryFileName;
+    protected IFileSplitProvider primarySplitProvider;
+
     // field, type and key declarations for secondary indexes
-    private int secondaryFieldCount = 2;
-    private ITypeTraits[] secondaryTypeTraits = new ITypeTraits[secondaryFieldCount];
-    private int secondaryKeyFieldCount = 2;
-    private IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[secondaryKeyFieldCount];
+    protected final int secondaryFieldCount = 2;
+    protected final ITypeTraits[] secondaryTypeTraits = new ITypeTraits[secondaryFieldCount];
+    protected final int secondaryKeyFieldCount = 2;
+    protected final IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[secondaryKeyFieldCount];
+    protected final int[] secondaryBloomFilterKeyFields = new int[secondaryKeyFieldCount];
 
-    private static String secondaryBtreeName = "secondary" + simpleDateFormat.format(new Date());
-    private static String secondaryFileName = System.getProperty("java.io.tmpdir") + sep + secondaryBtreeName;
-
-    private IFileSplitProvider secondaryBtreeSplitProvider = new ConstantFileSplitProvider(
-            new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(secondaryFileName))) });
-
-    private RecordDescriptor secondaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+    protected final RecordDescriptor secondaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
             UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
 
+    protected String secondaryFileName;
+    protected IFileSplitProvider secondarySplitProvider;
+
+    protected ITreeIndexOperatorTestHelper testHelper;
+
+    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
+        return new BTreeOperatorTestHelper();
+    }
+
     @Before
     public void setup() throws Exception {
+        testHelper = createTestHelper();
+        dataflowHelperFactory = createDataFlowHelperFactory();
+        primaryFileName = testHelper.getPrimaryIndexName();
+        primarySplitProvider = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID, new FileReference(
+                new File(primaryFileName))) });
+        secondaryFileName = testHelper.getSecondaryIndexName();
+        secondarySplitProvider = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                new FileReference(new File(secondaryFileName))) });
+
         // field, type and key declarations for primary index
         primaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
         primaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
@@ -120,31 +124,32 @@
         primaryTypeTraits[4] = UTF8StringPointable.TYPE_TRAITS;
         primaryTypeTraits[5] = UTF8StringPointable.TYPE_TRAITS;
         primaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
+        primaryBloomFilterKeyFields[0] = 0;
 
         // field, type and key declarations for secondary indexes
         secondaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
         secondaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
         secondaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
         secondaryComparatorFactories[1] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
-
-        createPrimaryIndex();
-        loadPrimaryIndexTest();
-        createSecondaryIndex();
-        loadSecondaryIndexTest();
-        insertPipelineTest();
+        secondaryBloomFilterKeyFields[0] = 0;
+        secondaryBloomFilterKeyFields[1] = 1;
     }
 
+    protected abstract IIndexDataflowHelperFactory createDataFlowHelperFactory();
+
     public void createPrimaryIndex() throws Exception {
         JobSpecification spec = new JobSpecification();
+        TransientLocalResourceFactoryProvider localResourceFactoryProvider = new TransientLocalResourceFactoryProvider();
         TreeIndexCreateOperatorDescriptor primaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec, storageManager,
-                indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories,
-                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+                lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
+                primaryBloomFilterKeyFields, dataflowHelperFactory, localResourceFactoryProvider,
+                NoOpOperationCallbackFactory.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryCreateOp, NC1_ID);
         spec.addRoot(primaryCreateOp);
         runTest(spec);
     }
-    
-    public void loadPrimaryIndexTest() throws Exception {
+
+    protected void loadPrimaryIndex() throws Exception {
         JobSpecification spec = new JobSpecification();
 
         FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
@@ -172,8 +177,9 @@
 
         int[] fieldPermutation = { 0, 1, 2, 4, 5, 7 };
         TreeIndexBulkLoadOperatorDescriptor primaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories, fieldPermutation, 0.7f,
-                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+                storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
+                primaryBloomFilterKeyFields, fieldPermutation, 0.7f, true, 1000L, dataflowHelperFactory,
+                NoOpOperationCallbackFactory.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeBulkLoad, NC1_ID);
 
         spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
@@ -186,15 +192,17 @@
 
     public void createSecondaryIndex() throws Exception {
         JobSpecification spec = new JobSpecification();
-        TreeIndexCreateOperatorDescriptor secondaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec, storageManager,
-                indexRegistryProvider, secondaryBtreeSplitProvider, secondaryTypeTraits, secondaryComparatorFactories,
-                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+        TransientLocalResourceFactoryProvider localResourceFactoryProvider = new TransientLocalResourceFactoryProvider();
+        TreeIndexCreateOperatorDescriptor secondaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec,
+                storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
+                secondaryComparatorFactories, secondaryBloomFilterKeyFields, dataflowHelperFactory,
+                localResourceFactoryProvider, NoOpOperationCallbackFactory.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryCreateOp, NC1_ID);
         spec.addRoot(secondaryCreateOp);
         runTest(spec);
     }
-    
-    public void loadSecondaryIndexTest() throws Exception {
+
+    protected void loadSecondaryIndex() throws Exception {
         JobSpecification spec = new JobSpecification();
 
         // build dummy tuple containing nothing
@@ -218,21 +226,23 @@
 
         // scan primary index
         BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories, lowKeyFields,
-                highKeyFields, true, true, dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
+                storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
+                primaryBloomFilterKeyFields, lowKeyFields, highKeyFields, true, true, dataflowHelperFactory, false,
+                NoOpOperationCallbackFactory.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
 
         // sort based on secondary keys
         ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 1000, new int[] { 3, 0 },
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
-                primaryRecDesc);
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+                        PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) }, primaryRecDesc);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID);
 
         // load secondary index
         int[] fieldPermutation = { 3, 0 };
         TreeIndexBulkLoadOperatorDescriptor secondaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, indexRegistryProvider, secondaryBtreeSplitProvider, secondaryTypeTraits, secondaryComparatorFactories, fieldPermutation, 0.7f,
-                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+                storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
+                secondaryComparatorFactories, secondaryBloomFilterKeyFields, fieldPermutation, 0.7f, true, 1000L,
+                dataflowHelperFactory, NoOpOperationCallbackFactory.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryBtreeBulkLoad, NC1_ID);
 
         spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryBtreeSearchOp, 0);
@@ -243,8 +253,8 @@
         runTest(spec);
     }
 
-    public void insertPipelineTest() throws Exception {
-
+    protected void insertPipeline(boolean useUpsert) throws Exception {
+        IndexOperation pipelineOperation = useUpsert ? IndexOperation.UPSERT : IndexOperation.INSERT;
         JobSpecification spec = new JobSpecification();
 
         FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
@@ -268,17 +278,17 @@
         // insert into primary index
         int[] primaryFieldPermutation = { 0, 1, 2, 4, 5, 7 };
         TreeIndexInsertUpdateDeleteOperatorDescriptor primaryBtreeInsertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
-                spec, ordersDesc, storageManager, indexRegistryProvider, primaryBtreeSplitProvider,
-                primaryTypeTraits, primaryComparatorFactories,
-                primaryFieldPermutation, IndexOp.INSERT, dataflowHelperFactory, null, NoOpOperationCallbackProvider.INSTANCE);
+                spec, ordersDesc, storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits,
+                primaryComparatorFactories, primaryBloomFilterKeyFields, primaryFieldPermutation, pipelineOperation,
+                dataflowHelperFactory, null, NoOpOperationCallbackFactory.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeInsertOp, NC1_ID);
 
         // first secondary index
         int[] fieldPermutationB = { 4, 0 };
         TreeIndexInsertUpdateDeleteOperatorDescriptor secondaryInsertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
-                spec, ordersDesc, storageManager, indexRegistryProvider, secondaryBtreeSplitProvider,
-                secondaryTypeTraits,
-                secondaryComparatorFactories, fieldPermutationB, IndexOp.INSERT, dataflowHelperFactory, null, NoOpOperationCallbackProvider.INSTANCE);
+                spec, ordersDesc, storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
+                secondaryComparatorFactories, secondaryBloomFilterKeyFields, fieldPermutationB, pipelineOperation,
+                dataflowHelperFactory, null, NoOpOperationCallbackFactory.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryInsertOp, NC1_ID);
 
         NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
@@ -294,74 +304,24 @@
         runTest(spec);
     }
 
-    @Test
-    public void searchUpdatedSecondaryIndexTest() throws Exception {
+    protected void destroyPrimaryIndex() throws Exception {
         JobSpecification spec = new JobSpecification();
-
-        // build tuple containing search keys (only use the first key as search
-        // key)
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(secondaryKeyFieldCount);
-        DataOutput dos = tb.getDataOutput();
-
-        tb.reset();
-        // low key
-        UTF8StringSerializerDeserializer.INSTANCE.serialize("1998-07-21", dos);
-        tb.addFieldEndOffset();
-        // high key
-        UTF8StringSerializerDeserializer.INSTANCE.serialize("2000-10-18", dos);
-        tb.addFieldEndOffset();
-
-        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE };
-        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
-        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
-                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
-
-        int[] secondaryLowKeyFields = { 0 };
-        int[] secondaryHighKeyFields = { 1 };
-
-        // search secondary index
-        BTreeSearchOperatorDescriptor secondaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec,
-                secondaryRecDesc, storageManager, indexRegistryProvider, secondaryBtreeSplitProvider,
-                secondaryTypeTraits,
-                secondaryComparatorFactories, secondaryLowKeyFields, secondaryHighKeyFields, true, true,
-                dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryBtreeSearchOp, NC1_ID);
-
-        // second field from the tuples coming from secondary index
-        int[] primaryLowKeyFields = { 1 };
-        // second field from the tuples coming from secondary index
-        int[] primaryHighKeyFields = { 1 };
-
-        // search primary index
-        BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories, primaryLowKeyFields,
-                primaryHighKeyFields, true, true, dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
-
-        ResultSetId rsId = new ResultSetId(1);
-        spec.addResultSetId(rsId);
-
-        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
-                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondaryBtreeSearchOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), secondaryBtreeSearchOp, 0, primaryBtreeSearchOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), primaryBtreeSearchOp, 0, printer, 0);
-
-        spec.addRoot(printer);
+        IndexDropOperatorDescriptor primaryDropOp = new IndexDropOperatorDescriptor(spec, storageManager,
+                lcManagerProvider, primarySplitProvider, dataflowHelperFactory);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryDropOp, NC1_ID);
+        spec.addRoot(primaryDropOp);
         runTest(spec);
     }
 
-    @AfterClass
-    public static void cleanup() throws Exception {
-        File primary = new File(primaryFileName);
-        primary.deleteOnExit();
-
-        File secondary = new File(secondaryFileName);
-        secondary.deleteOnExit();
+    protected void destroySecondaryIndex() throws Exception {
+        JobSpecification spec = new JobSpecification();
+        IndexDropOperatorDescriptor secondaryDropOp = new IndexDropOperatorDescriptor(spec, storageManager,
+                lcManagerProvider, secondarySplitProvider, dataflowHelperFactory);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryDropOp, NC1_ID);
+        spec.addRoot(secondaryDropOp);
+        runTest(spec);
     }
-}
\ No newline at end of file
+
+    @After
+    public abstract void cleanup() throws Exception;
+}
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreeOperatorTestHelper.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreeOperatorTestHelper.java
new file mode 100644
index 0000000..337bfda
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreeOperatorTestHelper.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.am.btree;
+
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.tests.am.common.TreeOperatorTestHelper;
+
+public class BTreeOperatorTestHelper extends TreeOperatorTestHelper {
+
+    public IIndexDataflowHelperFactory createDataFlowHelperFactory() {
+        return new BTreeDataflowHelperFactory();
+    }
+
+}
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreePrimaryIndexScanOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreePrimaryIndexScanOperatorTest.java
new file mode 100644
index 0000000..7e1c42a
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreePrimaryIndexScanOperatorTest.java
@@ -0,0 +1,99 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.am.btree;
+
+import java.io.DataOutput;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
+
+public class BTreePrimaryIndexScanOperatorTest extends AbstractBTreeOperatorTest {
+
+    @Before
+    public void setup() throws Exception {
+        super.setup();
+        createPrimaryIndex();
+        loadPrimaryIndex();
+    }
+
+    @Test
+    public void scanPrimaryIndexTest() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        // build dummy tuple containing nothing
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryKeyFieldCount * 2);
+        DataOutput dos = tb.getDataOutput();
+
+        tb.reset();
+        UTF8StringSerializerDeserializer.INSTANCE.serialize("0", dos);
+        tb.addFieldEndOffset();
+
+        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE };
+        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+
+        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
+                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
+
+        int[] lowKeyFields = null; // - infinity
+        int[] highKeyFields = null; // + infinity
+
+        BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
+                storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
+                primaryBloomFilterKeyFields, lowKeyFields, highKeyFields, true, true, dataflowHelperFactory, false,
+                NoOpOperationCallbackFactory.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
+
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryBtreeSearchOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), primaryBtreeSearchOp, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    @Override
+    protected IIndexDataflowHelperFactory createDataFlowHelperFactory() {
+        return ((BTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory();
+    }
+
+    @Override
+    public void cleanup() throws Exception {
+        destroyPrimaryIndex();
+    }
+}
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreePrimaryIndexSearchOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreePrimaryIndexSearchOperatorTest.java
new file mode 100644
index 0000000..64bc657
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreePrimaryIndexSearchOperatorTest.java
@@ -0,0 +1,104 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.am.btree;
+
+import java.io.DataOutput;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
+
+public class BTreePrimaryIndexSearchOperatorTest extends AbstractBTreeOperatorTest {
+
+    @Before
+    public void setup() throws Exception {
+        super.setup();
+        createPrimaryIndex();
+        loadPrimaryIndex();
+    }
+
+    @Test
+    public void searchPrimaryIndexTest() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        // build tuple containing low and high search key
+        // high key and low key
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryKeyFieldCount * 2);
+        DataOutput dos = tb.getDataOutput();
+
+        tb.reset();
+        // low key
+        UTF8StringSerializerDeserializer.INSTANCE.serialize("100", dos);
+        tb.addFieldEndOffset();
+        // high key
+        UTF8StringSerializerDeserializer.INSTANCE.serialize("200", dos);
+        tb.addFieldEndOffset();
+
+        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE };
+        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+
+        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
+                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
+
+        int[] lowKeyFields = { 0 };
+        int[] highKeyFields = { 1 };
+
+        BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
+                storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
+                primaryBloomFilterKeyFields, lowKeyFields, highKeyFields, true, true, dataflowHelperFactory, false,
+                NoOpOperationCallbackFactory.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
+
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryBtreeSearchOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), primaryBtreeSearchOp, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    @Override
+    protected IIndexDataflowHelperFactory createDataFlowHelperFactory() {
+        return ((BTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory();
+    }
+
+    @Override
+    public void cleanup() throws Exception {
+        destroyPrimaryIndex();
+    }
+}
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreePrimaryIndexStatsOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreePrimaryIndexStatsOperatorTest.java
new file mode 100644
index 0000000..c8d95ce
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreePrimaryIndexStatsOperatorTest.java
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.am.btree;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexStatsOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
+
+public class BTreePrimaryIndexStatsOperatorTest extends AbstractBTreeOperatorTest {
+
+    @Before
+    public void setup() throws Exception {
+        super.setup();
+        createPrimaryIndex();
+        loadPrimaryIndex();
+    }
+
+    @Test
+    public void showPrimaryIndexStats() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        TreeIndexStatsOperatorDescriptor primaryStatsOp = new TreeIndexStatsOperatorDescriptor(spec, storageManager,
+                lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
+                primaryBloomFilterKeyFields, dataflowHelperFactory, NoOpOperationCallbackFactory.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryStatsOp, NC1_ID);
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), primaryStatsOp, 0, printer, 0);
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    @Override
+    protected IIndexDataflowHelperFactory createDataFlowHelperFactory() {
+        return ((BTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory();
+    }
+
+    @Override
+    public void cleanup() throws Exception {
+        destroyPrimaryIndex();
+    }
+}
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreeSecondaryIndexInsertOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreeSecondaryIndexInsertOperatorTest.java
new file mode 100644
index 0000000..5a600a6
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreeSecondaryIndexInsertOperatorTest.java
@@ -0,0 +1,122 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.am.btree;
+
+import java.io.DataOutput;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
+
+public class BTreeSecondaryIndexInsertOperatorTest extends AbstractBTreeOperatorTest {
+
+    @Before
+    public void setup() throws Exception {
+        super.setup();
+        createPrimaryIndex();
+        loadPrimaryIndex();
+        createSecondaryIndex();
+        loadSecondaryIndex();
+        insertPipeline(false);
+    }
+
+    @Test
+    public void searchUpdatedSecondaryIndexTest() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        // build tuple containing search keys (only use the first key as search
+        // key)
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(secondaryKeyFieldCount);
+        DataOutput dos = tb.getDataOutput();
+
+        tb.reset();
+        // low key
+        UTF8StringSerializerDeserializer.INSTANCE.serialize("1998-07-21", dos);
+        tb.addFieldEndOffset();
+        // high key
+        UTF8StringSerializerDeserializer.INSTANCE.serialize("2000-10-18", dos);
+        tb.addFieldEndOffset();
+
+        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE };
+        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+
+        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
+                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
+
+        int[] secondaryLowKeyFields = { 0 };
+        int[] secondaryHighKeyFields = { 1 };
+
+        // search secondary index
+        BTreeSearchOperatorDescriptor secondaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec,
+                secondaryRecDesc, storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
+                secondaryComparatorFactories, secondaryBloomFilterKeyFields, secondaryLowKeyFields,
+                secondaryHighKeyFields, true, true, dataflowHelperFactory, false, NoOpOperationCallbackFactory.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryBtreeSearchOp, NC1_ID);
+
+        // second field from the tuples coming from secondary index
+        int[] primaryLowKeyFields = { 1 };
+        // second field from the tuples coming from secondary index
+        int[] primaryHighKeyFields = { 1 };
+
+        // search primary index
+        BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
+                storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
+                primaryBloomFilterKeyFields, primaryLowKeyFields, primaryHighKeyFields, true, true,
+                dataflowHelperFactory, false, NoOpOperationCallbackFactory.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
+
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondaryBtreeSearchOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), secondaryBtreeSearchOp, 0, primaryBtreeSearchOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), primaryBtreeSearchOp, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    @Override
+    protected IIndexDataflowHelperFactory createDataFlowHelperFactory() {
+        return ((BTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory();
+    }
+
+    @Override
+    public void cleanup() throws Exception {
+        destroyPrimaryIndex();
+        destroySecondaryIndex();
+    }
+}
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreeSecondaryIndexSearchOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreeSecondaryIndexSearchOperatorTest.java
new file mode 100644
index 0000000..e3005cf
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreeSecondaryIndexSearchOperatorTest.java
@@ -0,0 +1,121 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.am.btree;
+
+import java.io.DataOutput;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
+
+public class BTreeSecondaryIndexSearchOperatorTest extends AbstractBTreeOperatorTest {
+
+    @Before
+    public void setup() throws Exception {
+        super.setup();
+        createPrimaryIndex();
+        loadPrimaryIndex();
+        createSecondaryIndex();
+        loadSecondaryIndex();
+    }
+
+    @Test
+    public void searchSecondaryIndexTest() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        // build tuple containing search keys (only use the first key as search
+        // key)
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(secondaryKeyFieldCount);
+        DataOutput dos = tb.getDataOutput();
+
+        tb.reset();
+        // low key
+        UTF8StringSerializerDeserializer.INSTANCE.serialize("1998-07-21", dos);
+        tb.addFieldEndOffset();
+        // high key
+        UTF8StringSerializerDeserializer.INSTANCE.serialize("2000-10-18", dos);
+        tb.addFieldEndOffset();
+
+        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE };
+        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+
+        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
+                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
+
+        int[] secondaryLowKeyFields = { 0 };
+        int[] secondaryHighKeyFields = { 1 };
+
+        // search secondary index
+        BTreeSearchOperatorDescriptor secondaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec,
+                secondaryRecDesc, storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
+                secondaryComparatorFactories, secondaryBloomFilterKeyFields, secondaryLowKeyFields,
+                secondaryHighKeyFields, true, true, dataflowHelperFactory, false, NoOpOperationCallbackFactory.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryBtreeSearchOp, NC1_ID);
+
+        // second field from the tuples coming from secondary index
+        int[] primaryLowKeyFields = { 1 };
+        // second field from the tuples coming from secondary index
+        int[] primaryHighKeyFields = { 1 };
+
+        // search primary index
+        BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
+                storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
+                primaryBloomFilterKeyFields, primaryLowKeyFields, primaryHighKeyFields, true, true,
+                dataflowHelperFactory, false, NoOpOperationCallbackFactory.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
+
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondaryBtreeSearchOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), secondaryBtreeSearchOp, 0, primaryBtreeSearchOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), primaryBtreeSearchOp, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    @Override
+    protected IIndexDataflowHelperFactory createDataFlowHelperFactory() {
+        return ((BTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory();
+    }
+
+    @Override
+    public void cleanup() throws Exception {
+        destroyPrimaryIndex();
+        destroySecondaryIndex();
+    }
+}
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreeSecondaryIndexUpsertOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreeSecondaryIndexUpsertOperatorTest.java
new file mode 100644
index 0000000..758e926
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/btree/BTreeSecondaryIndexUpsertOperatorTest.java
@@ -0,0 +1,107 @@
+package edu.uci.ics.hyracks.tests.am.btree;
+
+import java.io.DataOutput;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
+
+public class BTreeSecondaryIndexUpsertOperatorTest extends AbstractBTreeOperatorTest {
+
+    @Before
+    public void setup() throws Exception {
+        super.setup();
+        createPrimaryIndex();
+        loadPrimaryIndex();
+        createSecondaryIndex();
+        loadSecondaryIndex();
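+        // true presumably selects upsert (rather than plain insert) in the shared pipeline of AbstractBTreeOperatorTest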
+        insertPipeline(true);
+    }
+
+    @Test
+    public void searchUpdatedSecondaryIndexTest() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        // build a tuple containing the low and high search keys for the secondary index range
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(secondaryKeyFieldCount);
+        DataOutput dos = tb.getDataOutput();
+
+        tb.reset();
+        // low key
+        UTF8StringSerializerDeserializer.INSTANCE.serialize("1998-07-21", dos);
+        tb.addFieldEndOffset();
+        // high key
+        UTF8StringSerializerDeserializer.INSTANCE.serialize("2000-10-18", dos);
+        tb.addFieldEndOffset();
+
+        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE };
+        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+
+        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
+                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
+
+        int[] secondaryLowKeyFields = { 0 };
+        int[] secondaryHighKeyFields = { 1 };
+
+        // search secondary index
+        BTreeSearchOperatorDescriptor secondaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec,
+                secondaryRecDesc, storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
+                secondaryComparatorFactories, secondaryBloomFilterKeyFields, secondaryLowKeyFields,
+                secondaryHighKeyFields, true, true, dataflowHelperFactory, false, NoOpOperationCallbackFactory.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryBtreeSearchOp, NC1_ID);
+
+        // second field from the tuples coming from secondary index
+        int[] primaryLowKeyFields = { 1 };
+        // second field from the tuples coming from secondary index
+        int[] primaryHighKeyFields = { 1 };
+
+        // search primary index
+        BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
+                storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
+                primaryBloomFilterKeyFields, primaryLowKeyFields, primaryHighKeyFields, true, true,
+                dataflowHelperFactory, false, NoOpOperationCallbackFactory.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
+
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondaryBtreeSearchOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), secondaryBtreeSearchOp, 0, primaryBtreeSearchOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), primaryBtreeSearchOp, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    @Override
+    protected IIndexDataflowHelperFactory createDataFlowHelperFactory() {
+        return ((BTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory();
+    }
+
+    @Override
+    public void cleanup() throws Exception {
+        destroyPrimaryIndex();
+        destroySecondaryIndex();
+    }
+}
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/common/ITreeIndexOperatorTestHelper.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/common/ITreeIndexOperatorTestHelper.java
new file mode 100644
index 0000000..f0c4519
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/common/ITreeIndexOperatorTestHelper.java
@@ -0,0 +1,24 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.am.common;
+
+public interface ITreeIndexOperatorTestHelper {
+    public String getPrimaryIndexName();
+
+    public String getSecondaryIndexName();
+
+    public void cleanup(String primaryFileName, String secondaryFileName);
+}
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/common/LSMTreeOperatorTestHelper.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/common/LSMTreeOperatorTestHelper.java
new file mode 100644
index 0000000..80b5285
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/common/LSMTreeOperatorTestHelper.java
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.am.common;
+
+import java.io.File;
+import java.io.FilenameFilter;
+import java.util.Date;
+
+import edu.uci.ics.hyracks.api.io.IODeviceHandle;
+import edu.uci.ics.hyracks.control.nc.io.IOManager;
+
+public class LSMTreeOperatorTestHelper extends TreeOperatorTestHelper {
+
+    protected final IOManager ioManager;
+
+    public LSMTreeOperatorTestHelper(IOManager ioManager) {
+        this.ioManager = ioManager;
+    }
+
+    public String getPrimaryIndexName() {
+        return "primary" + simpleDateFormat.format(new Date());
+    }
+
+    public String getSecondaryIndexName() {
+        return "secondary" + simpleDateFormat.format(new Date());
+    }
+
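+    // LSM indexes keep their components in a per-index directory on each IO device, so cleanup removes those directories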
+    @Override
+    public void cleanup(String primaryFileName, String secondaryFileName) {
+        for (IODeviceHandle dev : ioManager.getIODevices()) {
+            File primaryDir = new File(dev.getPath(), primaryFileName);
+            cleanupDir(primaryDir);
+            File secondaryDir = new File(dev.getPath(), secondaryFileName);
+            cleanupDir(secondaryDir);
+        }
+    }
+
+    private void cleanupDir(File dir) {
+        if (!dir.exists()) {
+            return;
+        }
+        FilenameFilter filter = new FilenameFilter() {
+            public boolean accept(File dir, String name) {
+                return !name.startsWith(".");
+            }
+        };
+        String[] files = dir.list(filter);
+        if (files != null) {
+            for (String fileName : files) {
+                File file = new File(dir.getPath() + File.separator + fileName);
+                file.delete();
+            }
+        }
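+        // delete the directory itself once its contents are gone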
+        dir.delete();
+    }
+}
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/common/TreeOperatorTestHelper.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/common/TreeOperatorTestHelper.java
new file mode 100644
index 0000000..935724b
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/common/TreeOperatorTestHelper.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.am.common;
+
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+public class TreeOperatorTestHelper implements ITreeIndexOperatorTestHelper {
+
+    protected final SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
+    protected final String sep = System.getProperty("file.separator");
+    protected static int DEFAULT_MEM_PAGE_SIZE = 32768;
+    protected static int DEFAULT_MEM_NUM_PAGES = 1000;
+
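+    // timestamped names keep index files from separate test runs from colliding in java.io.tmpdir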
+    public String getPrimaryIndexName() {
+        return System.getProperty("java.io.tmpdir") + sep + "primary" + simpleDateFormat.format(new Date());
+    }
+
+    public String getSecondaryIndexName() {
+        return System.getProperty("java.io.tmpdir") + sep + "secondary" + simpleDateFormat.format(new Date());
+    }
+
+    @Override
+    public void cleanup(String primaryFileName, String secondaryFileName) {
+        File primary = new File(primaryFileName);
+        if (primary.exists()) {
+            primary.deleteOnExit();
+        }
+        File secondary = new File(secondaryFileName);
+        if (secondary.exists()) {
+            secondary.deleteOnExit();
+        }
+    }
+}
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/WordInvertedIndexTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/AbstractfWordInvertedIndexTest.java
similarity index 63%
rename from hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/WordInvertedIndexTest.java
rename to hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/AbstractfWordInvertedIndexTest.java
index d1071a3..808afac 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/WordInvertedIndexTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/AbstractfWordInvertedIndexTest.java
@@ -1,5 +1,5 @@
 /*
- * Copyright 2009-2010 by The Regents of the University of California
+ * Copyright 2009-2012 by The Regents of the University of California
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * you may obtain a copy of the License from
@@ -13,14 +13,13 @@
  * limitations under the License.
  */
 
-package edu.uci.ics.hyracks.tests.invertedindex;
+package edu.uci.ics.hyracks.tests.am.invertedindex;
 
 import java.io.DataOutput;
 import java.io.File;
 import java.text.SimpleDateFormat;
 import java.util.Date;
 
-import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -30,7 +29,6 @@
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.dataset.ResultSetId;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.api.io.FileReference;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
@@ -49,97 +47,95 @@
 import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
 import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifierFactory;
-import edu.uci.ics.hyracks.storage.am.invertedindex.dataflow.BinaryTokenizerOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.invertedindex.dataflow.InvertedIndexBulkLoadOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.invertedindex.dataflow.InvertedIndexCreateOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.invertedindex.dataflow.InvertedIndexSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.invertedindex.searchmodifiers.ConjunctiveSearchModifierFactory;
-import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.DelimitedUTF8StringBinaryTokenizerFactory;
-import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizerFactory;
-import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.ITokenFactory;
-import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.UTF8WordTokenFactory;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.api.IInvertedIndexSearchModifierFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.BinaryTokenizerOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.LSMInvertedIndexBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.LSMInvertedIndexCreateOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.LSMInvertedIndexSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.search.ConjunctiveSearchModifierFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.DelimitedUTF8StringBinaryTokenizerFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.ITokenFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.UTF8WordTokenFactory;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.test.support.TestIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.common.file.ILocalResourceFactoryProvider;
+import edu.uci.ics.hyracks.storage.common.file.TransientLocalResourceFactoryProvider;
+import edu.uci.ics.hyracks.test.support.TestIndexLifecycleManagerProvider;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
 import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
-import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
 
-public class WordInvertedIndexTest extends AbstractIntegrationTest {
+@SuppressWarnings("rawtypes")
+public abstract class AbstractfWordInvertedIndexTest extends AbstractIntegrationTest {
     static {
         TestStorageManagerComponentHolder.init(8192, 20, 20);
     }
 
-    private IStorageManagerInterface storageManager = new TestStorageManagerInterface();
-    private IIndexRegistryProvider<IIndex> indexRegistryProvider = new TestIndexRegistryProvider();
-    private IIndexDataflowHelperFactory btreeDataflowHelperFactory = new BTreeDataflowHelperFactory();
+    protected static final int MERGE_THRESHOLD = 3;
 
-    private final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
-    private final static String sep = System.getProperty("file.separator");
-    private final static String dateString = simpleDateFormat.format(new Date());
-    private final static String primaryFileName = System.getProperty("java.io.tmpdir") + sep + "primaryBtree"
-            + dateString;
-    private final static String btreeFileName = System.getProperty("java.io.tmpdir") + sep + "invIndexBtree"
-            + dateString;
-    private final static String invListsFileName = System.getProperty("java.io.tmpdir") + sep + "invIndexLists"
-            + dateString;
+    protected IStorageManagerInterface storageManager = new TestStorageManagerInterface();
+    protected IIndexLifecycleManagerProvider lcManagerProvider = new TestIndexLifecycleManagerProvider();
+    protected IIndexDataflowHelperFactory btreeDataflowHelperFactory = new BTreeDataflowHelperFactory();
+    protected IIndexDataflowHelperFactory invertedIndexDataflowHelperFactory;
 
-    private IFileSplitProvider primaryFileSplitProvider = new ConstantFileSplitProvider(
+    protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
+    protected final static String sep = System.getProperty("file.separator");
+    protected final String dateString = simpleDateFormat.format(new Date());
+    protected final String primaryFileName = System.getProperty("java.io.tmpdir") + sep + "primaryBtree" + dateString;
+    protected final String btreeFileName = System.getProperty("java.io.tmpdir") + sep + "invIndexBtree" + dateString;
+
+    protected IFileSplitProvider primaryFileSplitProvider = new ConstantFileSplitProvider(
             new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(primaryFileName))) });
-    private IFileSplitProvider btreeFileSplitProvider = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(
-            NC1_ID, new FileReference(new File(btreeFileName))) });
-    private IFileSplitProvider invListsFileSplitProvider = new ConstantFileSplitProvider(
-            new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(invListsFileName))) });
+    protected IFileSplitProvider btreeFileSplitProvider = new ConstantFileSplitProvider(
+            new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(btreeFileName))) });
 
     // Primary BTree index.
-    private int primaryFieldCount = 2;
-    private ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
-    private int primaryKeyFieldCount = 1;
-    private IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
-    private RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+    protected int primaryFieldCount = 2;
+    protected ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
+    protected int primaryKeyFieldCount = 1;
+    protected IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
+    protected RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
             IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
 
     // Inverted index BTree dictionary.
-    private ITypeTraits[] tokenTypeTraits = new ITypeTraits[1];
-    private IBinaryComparatorFactory[] tokenComparatorFactories = new IBinaryComparatorFactory[1];
+    protected ITypeTraits[] tokenTypeTraits;
+    protected IBinaryComparatorFactory[] tokenComparatorFactories;
 
     // Inverted index stuff.
-    private int invListElementFieldCount = 1;
-    private ITypeTraits[] invListsTypeTraits = new ITypeTraits[invListElementFieldCount];
-    private IBinaryComparatorFactory[] invListsComparatorFactories = new IBinaryComparatorFactory[invListElementFieldCount];
-    private RecordDescriptor tokenizerRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-            UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
-    private RecordDescriptor invListsRecDesc = new RecordDescriptor(
+    protected int invListElementFieldCount = 1;
+    protected ITypeTraits[] invListsTypeTraits = new ITypeTraits[invListElementFieldCount];
+    protected IBinaryComparatorFactory[] invListsComparatorFactories = new IBinaryComparatorFactory[invListElementFieldCount];
+    protected RecordDescriptor invListsRecDesc = new RecordDescriptor(
             new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
+    protected RecordDescriptor tokenizerRecDesc;
 
     // Tokenizer stuff.
-    private ITokenFactory tokenFactory = new UTF8WordTokenFactory();
-    private IBinaryTokenizerFactory tokenizerFactory = new DelimitedUTF8StringBinaryTokenizerFactory(true, false,
+    protected ITokenFactory tokenFactory = new UTF8WordTokenFactory();
+    protected IBinaryTokenizerFactory tokenizerFactory = new DelimitedUTF8StringBinaryTokenizerFactory(true, false,
             tokenFactory);
 
+    // Sorting stuff.
+    protected IBinaryComparatorFactory[] sortComparatorFactories;
+
     @Before
     public void setup() throws Exception {
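+        // subclasses configure token traits, comparators, record descriptors, and the dataflow helper factory here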
+        prepare();
+
         // Field declarations and comparators for primary BTree index.
         primaryTypeTraits[0] = IntegerPointable.TYPE_TRAITS;
         primaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
         primaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
 
-        // Field declarations and comparators for tokens.
-        tokenTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
-        tokenComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
-
         // Field declarations and comparators for inverted lists.
         invListsTypeTraits[0] = IntegerPointable.TYPE_TRAITS;
         invListsComparatorFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
@@ -151,16 +147,33 @@
         loadInvertedIndex();
     }
 
+    protected abstract void prepare();
+
+    protected abstract boolean addNumTokensKey();
+
     public void createPrimaryIndex() throws Exception {
         JobSpecification spec = new JobSpecification();
+        TransientLocalResourceFactoryProvider localResourceFactoryProvider = new TransientLocalResourceFactoryProvider();
         TreeIndexCreateOperatorDescriptor primaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec, storageManager,
-                indexRegistryProvider, primaryFileSplitProvider, primaryTypeTraits, primaryComparatorFactories,
-                btreeDataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
+                lcManagerProvider, primaryFileSplitProvider, primaryTypeTraits, primaryComparatorFactories, null,
+                btreeDataflowHelperFactory, localResourceFactoryProvider, NoOpOperationCallbackFactory.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryCreateOp, NC1_ID);
         spec.addRoot(primaryCreateOp);
         runTest(spec);
     }
 
+    public void createInvertedIndex() throws Exception {
+        JobSpecification spec = new JobSpecification();
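+        // transient local resources: the index metadata registered here is not persisted across restarts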
+        ILocalResourceFactoryProvider localResourceFactoryProvider = new TransientLocalResourceFactoryProvider();
+        LSMInvertedIndexCreateOperatorDescriptor invIndexCreateOp = new LSMInvertedIndexCreateOperatorDescriptor(spec,
+                storageManager, btreeFileSplitProvider, lcManagerProvider, tokenTypeTraits, tokenComparatorFactories,
+                invListsTypeTraits, invListsComparatorFactories, tokenizerFactory, invertedIndexDataflowHelperFactory,
+                localResourceFactoryProvider, NoOpOperationCallbackFactory.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, invIndexCreateOp, NC1_ID);
+        spec.addRoot(invIndexCreateOp);
+        runTest(spec);
+    }
+
     @Test
     public void testConjunctiveSearcher() throws Exception {
         IInvertedIndexSearchModifierFactory conjunctiveSearchModifierFactory = new ConjunctiveSearchModifierFactory();
@@ -185,9 +198,9 @@
     private IOperatorDescriptor createPrimaryBulkLoadOp(JobSpecification spec) {
         int[] fieldPermutation = { 0, 1 };
         TreeIndexBulkLoadOperatorDescriptor primaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, indexRegistryProvider, primaryFileSplitProvider, primaryTypeTraits,
-                primaryComparatorFactories, fieldPermutation, 0.7f, btreeDataflowHelperFactory,
-                NoOpOperationCallbackProvider.INSTANCE);
+                storageManager, lcManagerProvider, primaryFileSplitProvider, primaryTypeTraits,
+                primaryComparatorFactories, null, fieldPermutation, 0.7f, true, 1000L, btreeDataflowHelperFactory,
+                NoOpOperationCallbackFactory.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeBulkLoad, NC1_ID);
         return primaryBtreeBulkLoad;
     }
@@ -212,9 +225,9 @@
         int[] lowKeyFields = null; // - infinity
         int[] highKeyFields = null; // + infinity
         BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                storageManager, indexRegistryProvider, primaryFileSplitProvider, primaryTypeTraits,
-                primaryComparatorFactories, lowKeyFields, highKeyFields, true, true, btreeDataflowHelperFactory, false,
-                NoOpOperationCallbackProvider.INSTANCE);
+                storageManager, lcManagerProvider, primaryFileSplitProvider, primaryTypeTraits,
+                primaryComparatorFactories, null, lowKeyFields, highKeyFields, true, true, btreeDataflowHelperFactory,
+                false, NoOpOperationCallbackFactory.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
         return primaryBtreeSearchOp;
     }
@@ -234,12 +247,9 @@
         JobSpecification spec = new JobSpecification();
         IOperatorDescriptor keyProviderOp = createScanKeyProviderOp(spec);
         IOperatorDescriptor primaryScanOp = createPrimaryScanOp(spec);
-
-        ResultSetId rsId = new ResultSetId(1);
-        spec.addResultSetId(rsId);
-
-        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
-                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
         spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryScanOp, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), primaryScanOp, 0, printer, 0);
         spec.addRoot(printer);
@@ -249,50 +259,41 @@
     private IOperatorDescriptor createExternalSortOp(JobSpecification spec, int[] sortFields,
             RecordDescriptor outputRecDesc) {
         ExternalSortOperatorDescriptor externalSortOp = new ExternalSortOperatorDescriptor(spec, 1000, sortFields,
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
-                        PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) }, outputRecDesc);
+                sortComparatorFactories, outputRecDesc);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, externalSortOp, NC1_ID);
         return externalSortOp;
     }
 
-    private IOperatorDescriptor createBinaryTokenizerOp(JobSpecification spec, int[] tokenFields, int[] keyFields) {
+    private IOperatorDescriptor createBinaryTokenizerOp(JobSpecification spec, int docField, int[] keyFields) {
         BinaryTokenizerOperatorDescriptor binaryTokenizer = new BinaryTokenizerOperatorDescriptor(spec,
-                tokenizerRecDesc, tokenizerFactory, tokenFields, keyFields);
+                tokenizerRecDesc, tokenizerFactory, docField, keyFields, addNumTokensKey());
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, binaryTokenizer, NC1_ID);
         return binaryTokenizer;
     }
 
     private IOperatorDescriptor createInvertedIndexBulkLoadOp(JobSpecification spec, int[] fieldPermutation) {
-        InvertedIndexBulkLoadOperatorDescriptor invIndexBulkLoadOp = new InvertedIndexBulkLoadOperatorDescriptor(spec,
-                fieldPermutation, storageManager, btreeFileSplitProvider, invListsFileSplitProvider,
-                indexRegistryProvider, tokenTypeTraits, tokenComparatorFactories, invListsTypeTraits,
-                invListsComparatorFactories, tokenizerFactory, btreeDataflowHelperFactory,
-                NoOpOperationCallbackProvider.INSTANCE);
+        LSMInvertedIndexBulkLoadOperatorDescriptor invIndexBulkLoadOp = new LSMInvertedIndexBulkLoadOperatorDescriptor(
+                spec, fieldPermutation, true, 1000L, storageManager, btreeFileSplitProvider, lcManagerProvider,
+                tokenTypeTraits, tokenComparatorFactories, invListsTypeTraits, invListsComparatorFactories,
+                tokenizerFactory, invertedIndexDataflowHelperFactory, NoOpOperationCallbackFactory.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, invIndexBulkLoadOp, NC1_ID);
         return invIndexBulkLoadOp;
     }
 
-    public void createInvertedIndex() throws Exception {
-        JobSpecification spec = new JobSpecification();
-        InvertedIndexCreateOperatorDescriptor invIndexCreateOp = new InvertedIndexCreateOperatorDescriptor(spec,
-                storageManager, btreeFileSplitProvider, invListsFileSplitProvider, indexRegistryProvider,
-                tokenTypeTraits, tokenComparatorFactories, invListsTypeTraits, invListsComparatorFactories,
-                tokenizerFactory, btreeDataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, invIndexCreateOp, NC1_ID);
-        spec.addRoot(invIndexCreateOp);
-        runTest(spec);
-    }
-
     public void loadInvertedIndex() throws Exception {
         JobSpecification spec = new JobSpecification();
         IOperatorDescriptor keyProviderOp = createScanKeyProviderOp(spec);
         IOperatorDescriptor primaryScanOp = createPrimaryScanOp(spec);
-        int[] tokenFields = { 1 };
+        int docField = 1;
         int[] keyFields = { 0 };
-        IOperatorDescriptor binaryTokenizerOp = createBinaryTokenizerOp(spec, tokenFields, keyFields);
-        int[] sortFields = { 0, 1 };
+        IOperatorDescriptor binaryTokenizerOp = createBinaryTokenizerOp(spec, docField, keyFields);
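+        // sort on every tokenizer output field, in order, and bulk load them without permutation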
+        int[] sortFields = new int[sortComparatorFactories.length];
+        int[] fieldPermutation = new int[sortComparatorFactories.length];
+        for (int i = 0; i < sortFields.length; i++) {
+            sortFields[i] = i;
+            fieldPermutation[i] = i;
+        }
         IOperatorDescriptor externalSortOp = createExternalSortOp(spec, sortFields, tokenizerRecDesc);
-        int[] fieldPermutation = { 0, 1 };
         IOperatorDescriptor invIndexBulkLoadOp = createInvertedIndexBulkLoadOp(spec, fieldPermutation);
         spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryScanOp, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), primaryScanOp, 0, binaryTokenizerOp, 0);
@@ -320,11 +321,11 @@
 
     private IOperatorDescriptor createInvertedIndexSearchOp(JobSpecification spec,
             IInvertedIndexSearchModifierFactory searchModifierFactory) {
-        InvertedIndexSearchOperatorDescriptor invIndexSearchOp = new InvertedIndexSearchOperatorDescriptor(spec, 0,
-                storageManager, btreeFileSplitProvider, invListsFileSplitProvider, indexRegistryProvider,
-                tokenTypeTraits, tokenComparatorFactories, invListsTypeTraits, invListsComparatorFactories,
-                btreeDataflowHelperFactory, tokenizerFactory, searchModifierFactory, invListsRecDesc, false,
-                NoOpOperationCallbackProvider.INSTANCE);
+        LSMInvertedIndexSearchOperatorDescriptor invIndexSearchOp = new LSMInvertedIndexSearchOperatorDescriptor(spec,
+                0, storageManager, btreeFileSplitProvider, lcManagerProvider, tokenTypeTraits,
+                tokenComparatorFactories, invListsTypeTraits, invListsComparatorFactories,
+                invertedIndexDataflowHelperFactory, tokenizerFactory, searchModifierFactory, invListsRecDesc, false,
+                NoOpOperationCallbackFactory.INSTANCE);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, invIndexSearchOp, NC1_ID);
         return invIndexSearchOp;
     }
@@ -334,25 +335,12 @@
         JobSpecification spec = new JobSpecification();
         IOperatorDescriptor queryProviderOp = createQueryProviderOp(spec, queryString);
         IOperatorDescriptor invIndexSearchOp = createInvertedIndexSearchOp(spec, searchModifierFactory);
-
-        ResultSetId rsId = new ResultSetId(1);
-        spec.addResultSetId(rsId);
-
-        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
-                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
         spec.connect(new OneToOneConnectorDescriptor(spec), queryProviderOp, 0, invIndexSearchOp, 0);
         spec.connect(new OneToOneConnectorDescriptor(spec), invIndexSearchOp, 0, printer, 0);
         spec.addRoot(printer);
         runTest(spec);
     }
-
-    @AfterClass
-    public static void cleanup() throws Exception {
-        File primary = new File(primaryFileName);
-        File btree = new File(btreeFileName);
-        File invLists = new File(invListsFileName);
-        primary.deleteOnExit();
-        btree.deleteOnExit();
-        invLists.deleteOnExit();
-    }
 }
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/BinaryTokenizerOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/BinaryTokenizerOperatorTest.java
similarity index 64%
rename from hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/BinaryTokenizerOperatorTest.java
rename to hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/BinaryTokenizerOperatorTest.java
index 600b54b..47480da 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/BinaryTokenizerOperatorTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/BinaryTokenizerOperatorTest.java
@@ -1,4 +1,4 @@
-package edu.uci.ics.hyracks.tests.invertedindex;
+package edu.uci.ics.hyracks.tests.am.invertedindex;
 
 import java.io.File;
 
@@ -8,10 +8,10 @@
 import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.dataset.ResultSetId;
 import edu.uci.ics.hyracks.api.io.FileReference;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.ShortSerializerDeserializer;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
 import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
 import edu.uci.ics.hyracks.dataflow.common.data.parsers.IntegerParserFactory;
@@ -22,19 +22,27 @@
 import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.invertedindex.dataflow.BinaryTokenizerOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.DelimitedUTF8StringBinaryTokenizerFactory;
-import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizerFactory;
-import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.ITokenFactory;
-import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.UTF8WordTokenFactory;
+import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.BinaryTokenizerOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.DelimitedUTF8StringBinaryTokenizerFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.IBinaryTokenizerFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.ITokenFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.tokenizers.UTF8WordTokenFactory;
 import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
-import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
 
 public class BinaryTokenizerOperatorTest extends AbstractIntegrationTest {
 
     @Test
     public void tokenizerTest() throws Exception {
+        test(false);
+    }
+
+    @Test
+    public void tokenizerWithNumTokensTest() throws Exception {
+        test(true);
+    }
+
+    private void test(boolean addNumTokensKey) throws Exception {
         JobSpecification spec = new JobSpecification();
 
         FileSplit[] dblpTitleFileSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
@@ -48,23 +56,27 @@
                         UTF8StringParserFactory.INSTANCE }, '|'), dblpTitleRecDesc);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, dblpTitleScanner, NC1_ID);
 
-        RecordDescriptor tokenizerRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
+        RecordDescriptor tokenizerRecDesc;
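+        // with addNumTokensKey, the tokenizer also emits the document's token count as a short field between the token and the key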
+        if (!addNumTokensKey) {
+            tokenizerRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                    UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
+        } else {
+            tokenizerRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                    UTF8StringSerializerDeserializer.INSTANCE, ShortSerializerDeserializer.INSTANCE,
+                    IntegerSerializerDeserializer.INSTANCE });
+        }
 
         ITokenFactory tokenFactory = new UTF8WordTokenFactory();
         IBinaryTokenizerFactory tokenizerFactory = new DelimitedUTF8StringBinaryTokenizerFactory(true, false,
                 tokenFactory);
-        int[] tokenFields = { 1 };
         int[] keyFields = { 0 };
         BinaryTokenizerOperatorDescriptor binaryTokenizer = new BinaryTokenizerOperatorDescriptor(spec,
-                tokenizerRecDesc, tokenizerFactory, tokenFields, keyFields);
+                tokenizerRecDesc, tokenizerFactory, 1, keyFields, addNumTokensKey);
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, binaryTokenizer, NC1_ID);
 
-        ResultSetId rsId = new ResultSetId(1);
-        spec.addResultSetId(rsId);
-
-        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
-                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
         PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
 
         spec.connect(new OneToOneConnectorDescriptor(spec), dblpTitleScanner, 0, binaryTokenizer, 0);
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/PartitionedWordInvertedIndexTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/PartitionedWordInvertedIndexTest.java
new file mode 100644
index 0000000..62d4362
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/PartitionedWordInvertedIndexTest.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.am.invertedindex;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.data.std.primitive.ShortPointable;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.ShortSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.lsm.common.impls.ConstantMergePolicyProvider;
+import edu.uci.ics.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;
+import edu.uci.ics.hyracks.storage.am.lsm.common.impls.SynchronousSchedulerProvider;
+import edu.uci.ics.hyracks.storage.am.lsm.common.impls.ThreadCountingOperationTrackerFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.PartitionedLSMInvertedIndexDataflowHelperFactory;
+
+public class PartitionedWordInvertedIndexTest extends AbstractfWordInvertedIndexTest {
+
+    @Override
+    protected void prepare() {
+        // Field declarations and comparators for tokens.
+        tokenTypeTraits = new ITypeTraits[] { UTF8StringPointable.TYPE_TRAITS, ShortPointable.TYPE_TRAITS };
+        tokenComparatorFactories = new IBinaryComparatorFactory[] {
+                PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+                PointableBinaryComparatorFactory.of(ShortPointable.FACTORY) };
+
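+        // tokenizer output: token, per-document token count (short), then the primary key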
+        tokenizerRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, ShortSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE });
+
+        sortComparatorFactories = new IBinaryComparatorFactory[] {
+                PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+                PointableBinaryComparatorFactory.of(ShortPointable.FACTORY),
+                PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) };
+
+        invertedIndexDataflowHelperFactory = new PartitionedLSMInvertedIndexDataflowHelperFactory(
+                new ConstantMergePolicyProvider(MERGE_THRESHOLD), ThreadCountingOperationTrackerFactory.INSTANCE,
+                SynchronousSchedulerProvider.INSTANCE, NoOpIOOperationCallback.INSTANCE, DEFAULT_MEM_PAGE_SIZE,
+                DEFAULT_MEM_NUM_PAGES);
+    }
+
+    @Override
+    protected boolean addNumTokensKey() {
+        return true;
+    }
+}
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/WordInvertedIndexTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/WordInvertedIndexTest.java
new file mode 100644
index 0000000..c35c6c9
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/invertedindex/WordInvertedIndexTest.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright 2009-2012 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.am.invertedindex;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.lsm.common.impls.ConstantMergePolicyProvider;
+import edu.uci.ics.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;
+import edu.uci.ics.hyracks.storage.am.lsm.common.impls.SynchronousSchedulerProvider;
+import edu.uci.ics.hyracks.storage.am.lsm.common.impls.ThreadCountingOperationTrackerFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.invertedindex.dataflow.LSMInvertedIndexDataflowHelperFactory;
+
+public class WordInvertedIndexTest extends AbstractfWordInvertedIndexTest {
+
+    @Override
+    protected void prepare() {
+        // Field declarations and comparators for tokens.
+        tokenTypeTraits = new ITypeTraits[] { UTF8StringPointable.TYPE_TRAITS };
+        tokenComparatorFactories = new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory
+                .of(UTF8StringPointable.FACTORY) };
+
+        tokenizerRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
+
+        sortComparatorFactories = new IBinaryComparatorFactory[] {
+                PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
+                PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY) };
+
+        invertedIndexDataflowHelperFactory = new LSMInvertedIndexDataflowHelperFactory(
+                new ConstantMergePolicyProvider(MERGE_THRESHOLD), ThreadCountingOperationTrackerFactory.INSTANCE,
+                SynchronousSchedulerProvider.INSTANCE, NoOpIOOperationCallback.INSTANCE, DEFAULT_MEM_PAGE_SIZE,
+                DEFAULT_MEM_NUM_PAGES);
+    }
+
+    @Override
+    protected boolean addNumTokensKey() {
+        return false;
+    }
+}
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreeOperatorTestHelper.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreeOperatorTestHelper.java
new file mode 100644
index 0000000..912ab0e
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreeOperatorTestHelper.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.am.lsm.btree;
+
+import edu.uci.ics.hyracks.control.nc.io.IOManager;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.btree.dataflow.LSMBTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.common.impls.ConstantMergePolicyProvider;
+import edu.uci.ics.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;
+import edu.uci.ics.hyracks.storage.am.lsm.common.impls.SynchronousSchedulerProvider;
+import edu.uci.ics.hyracks.storage.am.lsm.common.impls.ThreadCountingOperationTrackerFactory;
+import edu.uci.ics.hyracks.tests.am.common.LSMTreeOperatorTestHelper;
+
+public class LSMBTreeOperatorTestHelper extends LSMTreeOperatorTestHelper {
+
+    private static final int MERGE_THRESHOLD = 3;
+
+    public LSMBTreeOperatorTestHelper(IOManager ioManager) {
+        super(ioManager);
+    }
+
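+    // constant merge policy triggers merges at MERGE_THRESHOLD components; the synchronous scheduler runs flushes and merges inline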
+    public IIndexDataflowHelperFactory createDataFlowHelperFactory() {
+        return new LSMBTreeDataflowHelperFactory(new ConstantMergePolicyProvider(MERGE_THRESHOLD),
+                ThreadCountingOperationTrackerFactory.INSTANCE, SynchronousSchedulerProvider.INSTANCE,
+                NoOpIOOperationCallback.INSTANCE, DEFAULT_MEM_PAGE_SIZE, DEFAULT_MEM_NUM_PAGES);
+    }
+
+}
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreePrimaryIndexScanOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreePrimaryIndexScanOperatorTest.java
new file mode 100644
index 0000000..d751399
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreePrimaryIndexScanOperatorTest.java
@@ -0,0 +1,34 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.am.lsm.btree;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.tests.am.btree.BTreePrimaryIndexScanOperatorTest;
+import edu.uci.ics.hyracks.tests.am.common.ITreeIndexOperatorTestHelper;
+
+public class LSMBTreePrimaryIndexScanOperatorTest extends BTreePrimaryIndexScanOperatorTest {
+
+    @Override
+    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
+        return new LSMBTreeOperatorTestHelper(TestStorageManagerComponentHolder.getIOManager());
+    }
+
+    @Override
+    protected IIndexDataflowHelperFactory createDataFlowHelperFactory() {
+        return ((LSMBTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory();
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreePrimaryIndexSearchOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreePrimaryIndexSearchOperatorTest.java
new file mode 100644
index 0000000..bdcf3f6
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreePrimaryIndexSearchOperatorTest.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.am.lsm.btree;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.tests.am.btree.BTreePrimaryIndexSearchOperatorTest;
+import edu.uci.ics.hyracks.tests.am.common.ITreeIndexOperatorTestHelper;
+
+public class LSMBTreePrimaryIndexSearchOperatorTest extends BTreePrimaryIndexSearchOperatorTest {
+    @Override
+    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
+        return new LSMBTreeOperatorTestHelper(TestStorageManagerComponentHolder.getIOManager());
+    }
+
+    @Override
+    protected IIndexDataflowHelperFactory createDataFlowHelperFactory() {
+        return ((LSMBTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory();
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreeSecondaryIndexInsertOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreeSecondaryIndexInsertOperatorTest.java
new file mode 100644
index 0000000..5ba7279
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreeSecondaryIndexInsertOperatorTest.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.am.lsm.btree;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.tests.am.btree.BTreeSecondaryIndexInsertOperatorTest;
+import edu.uci.ics.hyracks.tests.am.common.ITreeIndexOperatorTestHelper;
+
+public class LSMBTreeSecondaryIndexInsertOperatorTest extends BTreeSecondaryIndexInsertOperatorTest {
+    @Override
+    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
+        return new LSMBTreeOperatorTestHelper(TestStorageManagerComponentHolder.getIOManager());
+    }
+
+    @Override
+    protected IIndexDataflowHelperFactory createDataFlowHelperFactory() {
+        return ((LSMBTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory();
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreeSecondaryIndexSearchOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreeSecondaryIndexSearchOperatorTest.java
new file mode 100644
index 0000000..c1b1cd8
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/btree/LSMBTreeSecondaryIndexSearchOperatorTest.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.am.lsm.btree;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.tests.am.btree.BTreeSecondaryIndexSearchOperatorTest;
+import edu.uci.ics.hyracks.tests.am.common.ITreeIndexOperatorTestHelper;
+
+public class LSMBTreeSecondaryIndexSearchOperatorTest extends BTreeSecondaryIndexSearchOperatorTest {
+    @Override
+    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
+        return new LSMBTreeOperatorTestHelper(TestStorageManagerComponentHolder.getIOManager());
+    }
+
+    @Override
+    protected IIndexDataflowHelperFactory createDataFlowHelperFactory() {
+        return ((LSMBTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory();
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeOperatorTestHelper.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeOperatorTestHelper.java
new file mode 100644
index 0000000..84b34b7
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeOperatorTestHelper.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.am.lsm.rtree;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
+import edu.uci.ics.hyracks.control.nc.io.IOManager;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.common.impls.ConstantMergePolicyProvider;
+import edu.uci.ics.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;
+import edu.uci.ics.hyracks.storage.am.lsm.common.impls.SynchronousSchedulerProvider;
+import edu.uci.ics.hyracks.storage.am.lsm.common.impls.ThreadCountingOperationTrackerFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.rtree.dataflow.LSMRTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
+import edu.uci.ics.hyracks.tests.am.common.LSMTreeOperatorTestHelper;
+
+public class LSMRTreeOperatorTestHelper extends LSMTreeOperatorTestHelper {
+
+    private static final int MERGE_THRESHOLD = 3;
+
+    public LSMRTreeOperatorTestHelper(IOManager ioManager) {
+        super(ioManager);
+    }
+
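+    // Builds the LSM-RTree dataflow helper used by these tests: R-Tree value provider
+    // factories and policy type, comparator factories for the BTree component, a
+    // constant merge policy driven by MERGE_THRESHOLD, a thread-counting operation
+    // tracker, synchronous flush/merge scheduling, no-op I/O-operation callbacks,
+    // a linearizer comparator factory, and the default in-memory page size/count.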
+    public IIndexDataflowHelperFactory createDataFlowHelperFactory(
+            IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
+            IBinaryComparatorFactory[] btreeComparatorFactories, ILinearizeComparatorFactory linearizerCmpFactory) {
+        return new LSMRTreeDataflowHelperFactory(valueProviderFactories, rtreePolicyType, btreeComparatorFactories,
+                new ConstantMergePolicyProvider(MERGE_THRESHOLD), ThreadCountingOperationTrackerFactory.INSTANCE,
+                SynchronousSchedulerProvider.INSTANCE, NoOpIOOperationCallback.INSTANCE, linearizerCmpFactory,
+                DEFAULT_MEM_PAGE_SIZE, DEFAULT_MEM_NUM_PAGES);
+    }
+}
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeSecondaryIndexInsertOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeSecondaryIndexInsertOperatorTest.java
new file mode 100644
index 0000000..99eb6c3
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeSecondaryIndexInsertOperatorTest.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.am.lsm.rtree;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.tests.am.common.ITreeIndexOperatorTestHelper;
+import edu.uci.ics.hyracks.tests.am.rtree.RTreeSecondaryIndexInsertOperatorTest;
+
+public class LSMRTreeSecondaryIndexInsertOperatorTest extends RTreeSecondaryIndexInsertOperatorTest {
+    @Override
+    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
+        return new LSMRTreeOperatorTestHelper(TestStorageManagerComponentHolder.getIOManager());
+    }
+
+    @Override
+    protected IIndexDataflowHelperFactory createDataFlowHelperFactory(
+            IPrimitiveValueProviderFactory[] secondaryValueProviderFactories, RTreePolicyType rtreePolicyType,
+            IBinaryComparatorFactory[] btreeComparatorFactories, ILinearizeComparatorFactory linearizerCmpFactory) {
+        return ((LSMRTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory(secondaryValueProviderFactories,
+                rtreePolicyType, btreeComparatorFactories, linearizerCmpFactory);
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeSecondaryIndexSearchOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeSecondaryIndexSearchOperatorTest.java
new file mode 100644
index 0000000..e1e6b04
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeSecondaryIndexSearchOperatorTest.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.am.lsm.rtree;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.tests.am.common.ITreeIndexOperatorTestHelper;
+import edu.uci.ics.hyracks.tests.am.rtree.RTreeSecondaryIndexSearchOperatorTest;
+
+public class LSMRTreeSecondaryIndexSearchOperatorTest extends RTreeSecondaryIndexSearchOperatorTest {
+    @Override
+    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
+        return new LSMRTreeOperatorTestHelper(TestStorageManagerComponentHolder.getIOManager());
+    }
+
+    @Override
+    protected IIndexDataflowHelperFactory createDataFlowHelperFactory(
+            IPrimitiveValueProviderFactory[] secondaryValueProviderFactories, RTreePolicyType rtreePolicyType,
+            IBinaryComparatorFactory[] btreeComparatorFactories, ILinearizeComparatorFactory linearizerCmpFactory) {
+        return ((LSMRTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory(secondaryValueProviderFactories,
+                rtreePolicyType, btreeComparatorFactories, linearizerCmpFactory);
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesOperatorTestHelper.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesOperatorTestHelper.java
new file mode 100644
index 0000000..bb31dcd
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesOperatorTestHelper.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.am.lsm.rtree;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
+import edu.uci.ics.hyracks.control.nc.io.IOManager;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.common.impls.ConstantMergePolicyProvider;
+import edu.uci.ics.hyracks.storage.am.lsm.common.impls.NoOpIOOperationCallback;
+import edu.uci.ics.hyracks.storage.am.lsm.common.impls.SynchronousSchedulerProvider;
+import edu.uci.ics.hyracks.storage.am.lsm.common.impls.ThreadCountingOperationTrackerFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.rtree.dataflow.LSMRTreeWithAntiMatterTuplesDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
+import edu.uci.ics.hyracks.tests.am.common.LSMTreeOperatorTestHelper;
+
+public class LSMRTreeWithAntiMatterTuplesOperatorTestHelper extends LSMTreeOperatorTestHelper {
+
+    private static final int MERGE_THRESHOLD = 3;
+
+    public LSMRTreeWithAntiMatterTuplesOperatorTestHelper(IOManager ioManager) {
+        super(ioManager);
+    }
+
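+    // Builds the dataflow helper for the anti-matter-tuples variant of the LSM-RTree,
+    // which records deletes as anti-matter tuples inside the R-Tree components instead
+    // of in a separate BTree; otherwise the wiring matches the plain LSM-RTree helper.
+    // Note that this factory takes no in-memory page-size/count arguments.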
+    public IIndexDataflowHelperFactory createDataFlowHelperFactory(
+            IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
+            IBinaryComparatorFactory[] btreeComparatorFactories, ILinearizeComparatorFactory linearizerCmpFactory) {
+        return new LSMRTreeWithAntiMatterTuplesDataflowHelperFactory(valueProviderFactories, rtreePolicyType,
+                btreeComparatorFactories, new ConstantMergePolicyProvider(MERGE_THRESHOLD),
+                ThreadCountingOperationTrackerFactory.INSTANCE, SynchronousSchedulerProvider.INSTANCE,
+                NoOpIOOperationCallback.INSTANCE, linearizerCmpFactory);
+    }
+
+}
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesSecondaryIndexInsertOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesSecondaryIndexInsertOperatorTest.java
new file mode 100644
index 0000000..bbf4dd3
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesSecondaryIndexInsertOperatorTest.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.am.lsm.rtree;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.tests.am.common.ITreeIndexOperatorTestHelper;
+import edu.uci.ics.hyracks.tests.am.rtree.RTreeSecondaryIndexInsertOperatorTest;
+
+public class LSMRTreeWithAntiMatterTuplesSecondaryIndexInsertOperatorTest extends RTreeSecondaryIndexInsertOperatorTest {
+    @Override
+    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
+        return new LSMRTreeWithAntiMatterTuplesOperatorTestHelper(TestStorageManagerComponentHolder.getIOManager());
+    }
+
+    @Override
+    protected IIndexDataflowHelperFactory createDataFlowHelperFactory(
+            IPrimitiveValueProviderFactory[] secondaryValueProviderFactories, RTreePolicyType rtreePolicyType,
+            IBinaryComparatorFactory[] btreeComparatorFactories, ILinearizeComparatorFactory linearizerCmpFactory) {
+        return ((LSMRTreeWithAntiMatterTuplesOperatorTestHelper) testHelper).createDataFlowHelperFactory(
+                secondaryValueProviderFactories, rtreePolicyType, btreeComparatorFactories, linearizerCmpFactory);
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesSecondaryIndexSearchOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesSecondaryIndexSearchOperatorTest.java
new file mode 100644
index 0000000..b615636
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/lsm/rtree/LSMRTreeWithAntiMatterTuplesSecondaryIndexSearchOperatorTest.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.am.lsm.rtree;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.tests.am.common.ITreeIndexOperatorTestHelper;
+import edu.uci.ics.hyracks.tests.am.rtree.RTreeSecondaryIndexSearchOperatorTest;
+
+public class LSMRTreeWithAntiMatterTuplesSecondaryIndexSearchOperatorTest extends RTreeSecondaryIndexSearchOperatorTest {
+    @Override
+    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
+        return new LSMRTreeWithAntiMatterTuplesOperatorTestHelper(TestStorageManagerComponentHolder.getIOManager());
+    }
+
+    @Override
+    protected IIndexDataflowHelperFactory createDataFlowHelperFactory(
+            IPrimitiveValueProviderFactory[] secondaryValueProviderFactories, RTreePolicyType rtreePolicyType,
+            IBinaryComparatorFactory[] btreeComparatorFactories, ILinearizeComparatorFactory linearizerCmpFactory) {
+        return ((LSMRTreeWithAntiMatterTuplesOperatorTestHelper) testHelper).createDataFlowHelperFactory(
+                secondaryValueProviderFactories, rtreePolicyType, btreeComparatorFactories, linearizerCmpFactory);
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/AbstractRTreeOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/AbstractRTreeOperatorTest.java
new file mode 100644
index 0000000..c4a63df
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/AbstractRTreeOperatorTest.java
@@ -0,0 +1,369 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.am.rtree;
+
+import java.io.DataOutput;
+import java.io.File;
+
+import org.junit.After;
+import org.junit.Before;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.DoubleParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.misc.NullSinkOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexDropOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexInsertUpdateDeleteOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOperation;
+import edu.uci.ics.hyracks.storage.am.lsm.rtree.utils.LSMRTreeUtils;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+import edu.uci.ics.hyracks.storage.common.file.TransientLocalResourceFactoryProvider;
+import edu.uci.ics.hyracks.test.support.TestIndexLifecycleManagerProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
+import edu.uci.ics.hyracks.tests.am.common.ITreeIndexOperatorTestHelper;
+import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
+
+public abstract class AbstractRTreeOperatorTest extends AbstractIntegrationTest {
+    static {
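+        // configure the shared test storage manager: page size, buffer-cache page count, max open files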
+        TestStorageManagerComponentHolder.init(8192, 20, 20);
+    }
+
+    protected final IStorageManagerInterface storageManager = new TestStorageManagerInterface();
+    protected final IIndexLifecycleManagerProvider lcManagerProvider = new TestIndexLifecycleManagerProvider();
+    protected IIndexDataflowHelperFactory rtreeDataflowHelperFactory;
+    protected IIndexDataflowHelperFactory btreeDataflowHelperFactory = new BTreeDataflowHelperFactory();
+
+    // field, type and key declarations for primary index
+    protected final int primaryFieldCount = 10;
+    protected final ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
+    protected final int primaryKeyFieldCount = 1;
+    protected final IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
+
+    protected final RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+            DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+            DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE });
+
+    // set in setup() using the test helper created by the subclass
+    protected String primaryFileName;
+    protected IFileSplitProvider primarySplitProvider;
+
+    // field, type and key declarations for secondary indexes
+    protected final int secondaryFieldCount = 5;
+    protected final ITypeTraits[] secondaryTypeTraits = new ITypeTraits[secondaryFieldCount];
+    protected final int secondaryKeyFieldCount = 4;
+    protected final IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[secondaryKeyFieldCount];
+
+    protected final RecordDescriptor secondaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+            DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+            DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+            UTF8StringSerializerDeserializer.INSTANCE });
+
+    // Only used for the LSM-RTree: we need comparator factories for the BTree
+    // component of the LSM-RTree.
+    protected final int btreeKeyFieldCount = 5;
+    protected final IBinaryComparatorFactory[] btreeComparatorFactories = new IBinaryComparatorFactory[btreeKeyFieldCount];
+
+    protected String secondaryFileName;
+    protected IFileSplitProvider secondarySplitProvider;
+
+    protected ITreeIndexOperatorTestHelper testHelper;
+
+    protected ITreeIndexOperatorTestHelper createTestHelper() throws HyracksException {
+        return new RTreeOperatorTestHelper();
+    }
+
+    @Before
+    public void setup() throws Exception {
+        testHelper = createTestHelper();
+
+        primaryFileName = testHelper.getPrimaryIndexName();
+        primarySplitProvider = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID, new FileReference(
+                new File(primaryFileName))) });
+        secondaryFileName = testHelper.getSecondaryIndexName();
+        secondarySplitProvider = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                new FileReference(new File(secondaryFileName))) });
+
+        // field, type and key declarations for primary index
+        primaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
+        primaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
+        primaryTypeTraits[2] = UTF8StringPointable.TYPE_TRAITS;
+        primaryTypeTraits[3] = UTF8StringPointable.TYPE_TRAITS;
+        primaryTypeTraits[4] = UTF8StringPointable.TYPE_TRAITS;
+        primaryTypeTraits[5] = UTF8StringPointable.TYPE_TRAITS;
+        // fields 6-9 hold the double-valued location fields (see primaryRecDesc)
+        primaryTypeTraits[6] = DoublePointable.TYPE_TRAITS;
+        primaryTypeTraits[7] = DoublePointable.TYPE_TRAITS;
+        primaryTypeTraits[8] = DoublePointable.TYPE_TRAITS;
+        primaryTypeTraits[9] = DoublePointable.TYPE_TRAITS;
+        primaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
+
+        // field, type and key declarations for secondary indexes
+        secondaryTypeTraits[0] = DoublePointable.TYPE_TRAITS;
+        secondaryTypeTraits[1] = DoublePointable.TYPE_TRAITS;
+        secondaryTypeTraits[2] = DoublePointable.TYPE_TRAITS;
+        secondaryTypeTraits[3] = DoublePointable.TYPE_TRAITS;
+        secondaryTypeTraits[4] = UTF8StringPointable.TYPE_TRAITS;
+        secondaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+        secondaryComparatorFactories[1] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+        secondaryComparatorFactories[2] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+        secondaryComparatorFactories[3] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+
+        // This is only used for the LSM-RTree.
+        btreeComparatorFactories[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+        btreeComparatorFactories[1] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+        btreeComparatorFactories[2] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+        btreeComparatorFactories[3] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+        btreeComparatorFactories[4] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
+
+        IPrimitiveValueProviderFactory[] secondaryValueProviderFactories = RTreeUtils
+                .createPrimitiveValueProviderFactories(secondaryComparatorFactories.length, DoublePointable.FACTORY);
+
+        rtreeDataflowHelperFactory = createDataFlowHelperFactory(secondaryValueProviderFactories,
+                RTreePolicyType.RSTARTREE, btreeComparatorFactories,
+                LSMRTreeUtils.proposeBestLinearizer(secondaryTypeTraits, secondaryComparatorFactories.length));
+
+    }
+
+    protected abstract IIndexDataflowHelperFactory createDataFlowHelperFactory(
+            IPrimitiveValueProviderFactory[] secondaryValueProviderFactories, RTreePolicyType rtreePolicyType,
+            IBinaryComparatorFactory[] btreeComparatorFactories, ILinearizeComparatorFactory linearizerCmpFactory)
+            throws TreeIndexException;
+
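+    // Creates the (BTree) primary index with a single-operator job pinned to NC1.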
+    protected void createPrimaryIndex() throws Exception {
+        JobSpecification spec = new JobSpecification();
+        TransientLocalResourceFactoryProvider localResourceFactoryProvider = new TransientLocalResourceFactoryProvider();
+        TreeIndexCreateOperatorDescriptor primaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec, storageManager,
+                lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories, null,
+                btreeDataflowHelperFactory, localResourceFactoryProvider, NoOpOperationCallbackFactory.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryCreateOp, NC1_ID);
+        spec.addRoot(primaryCreateOp);
+        runTest(spec);
+    }
+
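+    // Bulk-loads the primary index: scan the part-1 orders file, sort on the primary
+    // key (field 0), and bulk-load the ten projected fields.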
+    protected void loadPrimaryIndex() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
+                "data/orders-with-locations-part1.txt"))) };
+        IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
+        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE });
+
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        DoubleParserFactory.INSTANCE, DoubleParserFactory.INSTANCE, DoubleParserFactory.INSTANCE,
+                        DoubleParserFactory.INSTANCE }, '|'), ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
+
+        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 1000, new int[] { 0 },
+                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
+                ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID);
+
+        int[] fieldPermutation = { 0, 1, 2, 4, 5, 7, 9, 10, 11, 12 };
+        TreeIndexBulkLoadOperatorDescriptor primaryBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
+                storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
+                null, fieldPermutation, 0.7f, false, 1000L, btreeDataflowHelperFactory,
+                NoOpOperationCallbackFactory.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBulkLoad, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, primaryBulkLoad, 0);
+
+        spec.addRoot(primaryBulkLoad);
+        runTest(spec);
+    }
+
+    protected void createSecondaryIndex() throws Exception {
+        JobSpecification spec = new JobSpecification();
+        TransientLocalResourceFactoryProvider localResourceFactoryProvider = new TransientLocalResourceFactoryProvider();
+        TreeIndexCreateOperatorDescriptor secondaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec,
+                storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
+                secondaryComparatorFactories, null, rtreeDataflowHelperFactory, localResourceFactoryProvider,
+                NoOpOperationCallbackFactory.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryCreateOp, NC1_ID);
+        spec.addRoot(secondaryCreateOp);
+        runTest(spec);
+    }
+
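+    // Bulk-loads the secondary R-Tree: a constant one-tuple source drives an open-range
+    // scan of the primary index, and the four double-valued fields plus the primary key
+    // (fields 6-9 and 0) are bulk-loaded into the R-Tree.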
+    protected void loadSecondaryIndex() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        // build a dummy tuple to drive the primary scan; the scan below uses open-ended
+        // key ranges, so the tuple's content is never interpreted as a key
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryKeyFieldCount * 2);
+        DataOutput dos = tb.getDataOutput();
+
+        tb.reset();
+        UTF8StringSerializerDeserializer.INSTANCE.serialize("0", dos);
+        tb.addFieldEndOffset();
+
+        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE };
+        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+
+        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
+                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
+
+        int[] lowKeyFields = null; // - infinity
+        int[] highKeyFields = null; // + infinity
+
+        // scan primary index
+        BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
+                storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
+                null, lowKeyFields, highKeyFields, true, true, btreeDataflowHelperFactory, false,
+                NoOpOperationCallbackFactory.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primarySearchOp, NC1_ID);
+
+        // load secondary index
+        int[] fieldPermutation = { 6, 7, 8, 9, 0 };
+        TreeIndexBulkLoadOperatorDescriptor secondaryBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
+                storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
+                secondaryComparatorFactories, null, fieldPermutation, 0.7f, false, 1000L, rtreeDataflowHelperFactory,
+                NoOpOperationCallbackFactory.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryBulkLoad, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primarySearchOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), primarySearchOp, 0, secondaryBulkLoad, 0);
+
+        spec.addRoot(secondaryBulkLoad);
+        runTest(spec);
+    }
+
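+    // Pushes the part-2 orders file through both indexes:
+    // file scan -> insert into primary BTree -> insert into secondary R-Tree -> null sink.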
+    protected void insertPipeline() throws Exception {
+
+        JobSpecification spec = new JobSpecification();
+
+        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
+                "data/orders-with-locations-part2.txt"))) };
+        IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
+        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE });
+
+        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
+                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+                        DoubleParserFactory.INSTANCE, DoubleParserFactory.INSTANCE, DoubleParserFactory.INSTANCE,
+                        DoubleParserFactory.INSTANCE }, '|'), ordersDesc);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
+
+        // insert into primary index
+        int[] primaryFieldPermutation = { 0, 1, 2, 4, 5, 7, 9, 10, 11, 12 };
+        TreeIndexInsertUpdateDeleteOperatorDescriptor primaryInsertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, ordersDesc, storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits,
+                primaryComparatorFactories, null, primaryFieldPermutation, IndexOperation.INSERT,
+                btreeDataflowHelperFactory, null, NoOpOperationCallbackFactory.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryInsertOp, NC1_ID);
+
+        // insert into secondary index
+        int[] secondaryFieldPermutation = { 9, 10, 11, 12, 0 };
+        TreeIndexInsertUpdateDeleteOperatorDescriptor secondaryInsertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, ordersDesc, storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
+                secondaryComparatorFactories, null, secondaryFieldPermutation, IndexOperation.INSERT,
+                rtreeDataflowHelperFactory, null, NoOpOperationCallbackFactory.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryInsertOp, NC1_ID);
+
+        NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, nullSink, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, primaryInsertOp, 0);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), primaryInsertOp, 0, secondaryInsertOp, 0);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), secondaryInsertOp, 0, nullSink, 0);
+
+        spec.addRoot(nullSink);
+        runTest(spec);
+    }
+
+    protected void destroyPrimaryIndex() throws Exception {
+        JobSpecification spec = new JobSpecification();
+        IndexDropOperatorDescriptor primaryDropOp = new IndexDropOperatorDescriptor(spec, storageManager,
+                lcManagerProvider, primarySplitProvider, btreeDataflowHelperFactory);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryDropOp, NC1_ID);
+        spec.addRoot(primaryDropOp);
+        runTest(spec);
+    }
+
+    protected void destroySecondaryIndex() throws Exception {
+        JobSpecification spec = new JobSpecification();
+        IndexDropOperatorDescriptor secondaryDropOp = new IndexDropOperatorDescriptor(spec, storageManager,
+                lcManagerProvider, secondarySplitProvider, rtreeDataflowHelperFactory);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryDropOp, NC1_ID);
+        spec.addRoot(secondaryDropOp);
+        runTest(spec);
+    }
+
+    @After
+    public abstract void cleanup() throws Exception;
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/RTreeOperatorTestHelper.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/RTreeOperatorTestHelper.java
new file mode 100644
index 0000000..85abcb1
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/RTreeOperatorTestHelper.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.am.rtree;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
+import edu.uci.ics.hyracks.tests.am.common.TreeOperatorTestHelper;
+
+public class RTreeOperatorTestHelper extends TreeOperatorTestHelper {
+
+    public IIndexDataflowHelperFactory createDataFlowHelperFactory(
+            IPrimitiveValueProviderFactory[] valueProviderFactories, RTreePolicyType rtreePolicyType,
+            IBinaryComparatorFactory[] btreeComparatorFactories) {
+        return new RTreeDataflowHelperFactory(valueProviderFactories, rtreePolicyType);
+    }
+
+}
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/RTreeSecondaryIndexInsertOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/RTreeSecondaryIndexInsertOperatorTest.java
new file mode 100644
index 0000000..8e68bcf
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/RTreeSecondaryIndexInsertOperatorTest.java
@@ -0,0 +1,132 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.am.rtree;
+
+import java.io.DataOutput;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
+
+public class RTreeSecondaryIndexInsertOperatorTest extends AbstractRTreeOperatorTest {
+
+    @Before
+    @Override
+    public void setup() throws Exception {
+        super.setup();
+        createPrimaryIndex();
+        loadPrimaryIndex();
+        createSecondaryIndex();
+        loadSecondaryIndex();
+        insertPipeline();
+    }
+
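+    // Probes the secondary R-Tree with a fixed query rectangle, then uses the primary
+    // key carried in field 4 of the secondary-index tuples to fetch the matching records
+    // from the primary index and writes them to a temporary output file.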
+    @Test
+    public void searchUpdatedSecondaryIndexTest() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        // build the query rectangle key: min corner (fields 0-1), then max corner (fields 2-3)
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(secondaryKeyFieldCount);
+        DataOutput dos = tb.getDataOutput();
+
+        tb.reset();
+        DoubleSerializerDeserializer.INSTANCE.serialize(61.2894, dos);
+        tb.addFieldEndOffset();
+        DoubleSerializerDeserializer.INSTANCE.serialize(-149.624, dos);
+        tb.addFieldEndOffset();
+        DoubleSerializerDeserializer.INSTANCE.serialize(61.8894, dos);
+        tb.addFieldEndOffset();
+        DoubleSerializerDeserializer.INSTANCE.serialize(-149.024, dos);
+        tb.addFieldEndOffset();
+
+        ISerializerDeserializer[] keyRecDescSers = { DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE };
+        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+
+        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
+                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
+
+        int[] keyFields = { 0, 1, 2, 3 };
+
+        RTreeSearchOperatorDescriptor secondarySearchOp = new RTreeSearchOperatorDescriptor(spec, secondaryRecDesc,
+                storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
+                secondaryComparatorFactories, keyFields, rtreeDataflowHelperFactory, false,
+                NoOpOperationCallbackFactory.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondarySearchOp, NC1_ID);
+
+        // fifth field from the tuples coming from secondary index
+        int[] primaryLowKeyFields = { 4 };
+        // fifth field from the tuples coming from secondary index
+        int[] primaryHighKeyFields = { 4 };
+
+        // search primary index
+        BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
+                storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
+                null, primaryLowKeyFields, primaryHighKeyFields, true, true, btreeDataflowHelperFactory, false,
+                NoOpOperationCallbackFactory.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primarySearchOp, NC1_ID);
+
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
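+        // wire the pipeline: constant key tuple -> secondary R-tree search -> primary B-tree lookup -> plain file writer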
+        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondarySearchOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), secondarySearchOp, 0, primarySearchOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), primarySearchOp, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    @Override
+    protected IIndexDataflowHelperFactory createDataFlowHelperFactory(
+            IPrimitiveValueProviderFactory[] secondaryValueProviderFactories, RTreePolicyType rtreePolicyType,
+            IBinaryComparatorFactory[] btreeComparatorFactories, ILinearizeComparatorFactory linearizerCmpFactory)
+            throws TreeIndexException {
+        return ((RTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory(secondaryValueProviderFactories,
+                rtreePolicyType, null);
+    }
+
+    @Override
+    public void cleanup() throws Exception {
+        destroyPrimaryIndex();
+        destroySecondaryIndex();
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/RTreeSecondaryIndexScanOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/RTreeSecondaryIndexScanOperatorTest.java
new file mode 100644
index 0000000..90bb918
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/RTreeSecondaryIndexScanOperatorTest.java
@@ -0,0 +1,115 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.am.rtree;
+
+import java.io.DataOutput;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
+
+public class RTreeSecondaryIndexScanOperatorTest extends AbstractRTreeOperatorTest {
+
+    @Before
+    public void setup() throws Exception {
+        super.setup();
+        createPrimaryIndex();
+        loadPrimaryIndex();
+        createSecondaryIndex();
+        loadSecondaryIndex();
+    }
+
+    @Test
+    public void scanSecondaryIndexTest() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        // build a dummy tuple; its values are ignored because no search key fields are used below
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(secondaryKeyFieldCount);
+        DataOutput dos = tb.getDataOutput();
+
+        tb.reset();
+        DoubleSerializerDeserializer.INSTANCE.serialize(0.0, dos);
+        tb.addFieldEndOffset();
+        DoubleSerializerDeserializer.INSTANCE.serialize(0.0, dos);
+        tb.addFieldEndOffset();
+        DoubleSerializerDeserializer.INSTANCE.serialize(0.0, dos);
+        tb.addFieldEndOffset();
+        DoubleSerializerDeserializer.INSTANCE.serialize(0.0, dos);
+        tb.addFieldEndOffset();
+
+        ISerializerDeserializer[] keyRecDescSers = { DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE };
+        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+
+        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
+                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
+
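+        // no key fields: the R-tree search operator scans the entire secondary index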
+        int[] keyFields = null;
+
+        RTreeSearchOperatorDescriptor secondarySearchOp = new RTreeSearchOperatorDescriptor(spec, secondaryRecDesc,
+                storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
+                secondaryComparatorFactories, keyFields, rtreeDataflowHelperFactory, false,
+                NoOpOperationCallbackFactory.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondarySearchOp, NC1_ID);
+
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondarySearchOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), secondarySearchOp, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    @Override
+    protected IIndexDataflowHelperFactory createDataFlowHelperFactory(
+            IPrimitiveValueProviderFactory[] secondaryValueProviderFactories, RTreePolicyType rtreePolicyType,
+            IBinaryComparatorFactory[] btreeComparatorFactories, ILinearizeComparatorFactory linearizerCmpFactory) {
+        return ((RTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory(secondaryValueProviderFactories,
+                rtreePolicyType, null);
+    }
+
+    @Override
+    public void cleanup() throws Exception {
+        destroyPrimaryIndex();
+        destroySecondaryIndex();
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/RTreeSecondaryIndexSearchOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/RTreeSecondaryIndexSearchOperatorTest.java
new file mode 100644
index 0000000..5da122c
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/RTreeSecondaryIndexSearchOperatorTest.java
@@ -0,0 +1,131 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.am.rtree;
+
+import java.io.DataOutput;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
+
+public class RTreeSecondaryIndexSearchOperatorTest extends AbstractRTreeOperatorTest {
+
+    @Before
+    public void setup() throws Exception {
+        super.setup();
+        createPrimaryIndex();
+        loadPrimaryIndex();
+        createSecondaryIndex();
+        loadSecondaryIndex();
+    }
+
+    @Test
+    public void searchSecondaryIndexTest() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
+        // build the search key tuple: the two corner points of the query rectangle
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(secondaryKeyFieldCount);
+        DataOutput dos = tb.getDataOutput();
+
+        tb.reset();
+        DoubleSerializerDeserializer.INSTANCE.serialize(61.2894, dos);
+        tb.addFieldEndOffset();
+        DoubleSerializerDeserializer.INSTANCE.serialize(-149.624, dos);
+        tb.addFieldEndOffset();
+        DoubleSerializerDeserializer.INSTANCE.serialize(61.8894, dos);
+        tb.addFieldEndOffset();
+        DoubleSerializerDeserializer.INSTANCE.serialize(-149.024, dos);
+        tb.addFieldEndOffset();
+
+        ISerializerDeserializer[] keyRecDescSers = { DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE };
+        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+
+        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
+                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
+
+        int[] keyFields = { 0, 1, 2, 3 };
+
+        RTreeSearchOperatorDescriptor secondarySearchOp = new RTreeSearchOperatorDescriptor(spec, secondaryRecDesc,
+                storageManager, lcManagerProvider, secondarySplitProvider, secondaryTypeTraits,
+                secondaryComparatorFactories, keyFields, rtreeDataflowHelperFactory, false,
+                NoOpOperationCallbackFactory.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondarySearchOp, NC1_ID);
+
+        // fifth field from the tuples coming from secondary index
+        int[] primaryLowKeyFields = { 4 };
+        // fifth field from the tuples coming from secondary index
+        int[] primaryHighKeyFields = { 4 };
+
+        // search primary index
+        BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
+                storageManager, lcManagerProvider, primarySplitProvider, primaryTypeTraits, primaryComparatorFactories,
+                null, primaryLowKeyFields, primaryHighKeyFields, true, true, btreeDataflowHelperFactory, false,
+                NoOpOperationCallbackFactory.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primarySearchOp, NC1_ID);
+
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
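+        // key provider drives the secondary R-tree search; matching entries are resolved against the primary B-tree and printed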
+        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondarySearchOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), secondarySearchOp, 0, primarySearchOp, 0);
+        spec.connect(new OneToOneConnectorDescriptor(spec), primarySearchOp, 0, printer, 0);
+
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    @Override
+    protected IIndexDataflowHelperFactory createDataFlowHelperFactory(
+            IPrimitiveValueProviderFactory[] secondaryValueProviderFactories, RTreePolicyType rtreePolicyType,
+            IBinaryComparatorFactory[] btreeComparatorFactories, ILinearizeComparatorFactory linearizerCmpFactory)
+            throws TreeIndexException {
+        return ((RTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory(secondaryValueProviderFactories,
+                rtreePolicyType, null);
+    }
+
+    @Override
+    public void cleanup() throws Exception {
+        destroyPrimaryIndex();
+        destroySecondaryIndex();
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/RTreeSecondaryIndexStatsOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/RTreeSecondaryIndexStatsOperatorTest.java
new file mode 100644
index 0000000..83be1d9
--- /dev/null
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/am/rtree/RTreeSecondaryIndexStatsOperatorTest.java
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.am.rtree;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ILinearizeComparatorFactory;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexStatsOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreePolicyType;
+
+public class RTreeSecondaryIndexStatsOperatorTest extends AbstractRTreeOperatorTest {
+
+    @Before
+    public void setup() throws Exception {
+        super.setup();
+        createPrimaryIndex();
+        loadPrimaryIndex();
+        createSecondaryIndex();
+        loadSecondaryIndex();
+    }
+
+    @Test
+    public void showSecondaryIndexStats() throws Exception {
+        JobSpecification spec = new JobSpecification();
+
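+        // collect tree statistics for the secondary R-tree and route them to the file writer below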
+        TreeIndexStatsOperatorDescriptor secondaryStatsOp = new TreeIndexStatsOperatorDescriptor(spec, storageManager,
+                lcManagerProvider, secondarySplitProvider, secondaryTypeTraits, secondaryComparatorFactories, null,
+                rtreeDataflowHelperFactory, NoOpOperationCallbackFactory.INSTANCE);
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryStatsOp, NC1_ID);
+        IFileSplitProvider outSplits = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+                createTempFile().getAbsolutePath()) });
+        IOperatorDescriptor printer = new PlainFileWriterOperatorDescriptor(spec, outSplits, ",");
+        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+        spec.connect(new OneToOneConnectorDescriptor(spec), secondaryStatsOp, 0, printer, 0);
+        spec.addRoot(printer);
+        runTest(spec);
+    }
+
+    @Override
+    protected IIndexDataflowHelperFactory createDataFlowHelperFactory(
+            IPrimitiveValueProviderFactory[] secondaryValueProviderFactories, RTreePolicyType rtreePolicyType,
+            IBinaryComparatorFactory[] btreeComparatorFactories, ILinearizeComparatorFactory linearizerCmpFactory) {
+        return ((RTreeOperatorTestHelper) testHelper).createDataFlowHelperFactory(secondaryValueProviderFactories,
+                rtreePolicyType, null);
+    }
+
+    @Override
+    public void cleanup() throws Exception {
+        destroyPrimaryIndex();
+        destroySecondaryIndex();
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexScanOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexScanOperatorTest.java
deleted file mode 100644
index 33ddca2..0000000
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexScanOperatorTest.java
+++ /dev/null
@@ -1,209 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.btree;
-
-import java.io.DataOutput;
-import java.io.File;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.dataset.ResultSetId;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.test.support.TestIndexRegistryProvider;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
-import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
-import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
-
-public class BTreePrimaryIndexScanOperatorTest extends AbstractIntegrationTest {
-    static {
-        TestStorageManagerComponentHolder.init(8192, 20, 20);
-    }
-
-    private IStorageManagerInterface storageManager = new TestStorageManagerInterface();
-    private IIndexRegistryProvider<IIndex> indexRegistryProvider = new TestIndexRegistryProvider();
-    private IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory();
-
-    private final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
-    private final static String sep = System.getProperty("file.separator");
-
-    // field, type and key declarations for primary index
-    private int primaryFieldCount = 6;
-    private ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
-    private int primaryKeyFieldCount = 1;
-    private IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
-
-    private static String primaryBtreeName = "primary" + simpleDateFormat.format(new Date());
-    private static String primaryFileName = System.getProperty("java.io.tmpdir") + sep + primaryBtreeName;
-
-    private IFileSplitProvider primaryBtreeSplitProvider = new ConstantFileSplitProvider(
-            new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(primaryFileName))) });
-
-    private RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
-
-    @Before
-    public void setup() throws Exception {
-        // field, type and key declarations for primary index
-        primaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[2] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[3] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[4] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[5] = UTF8StringPointable.TYPE_TRAITS;
-        primaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
-
-        createPrimaryIndex();
-        loadPrimaryIndexTest();
-    }
-
-    public void createPrimaryIndex() throws Exception {
-        JobSpecification spec = new JobSpecification();
-        TreeIndexCreateOperatorDescriptor primaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec, storageManager,
-                indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories,
-                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryCreateOp, NC1_ID);
-        spec.addRoot(primaryCreateOp);
-        runTest(spec);
-    }
-
-    public void loadPrimaryIndexTest() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
-                "data/tpch0.001/orders-part1.tbl"))) };
-        IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
-        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
-
-        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 1000, new int[] { 0 },
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
-                ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID);
-
-        int[] fieldPermutation = { 0, 1, 2, 4, 5, 7 };
-        TreeIndexBulkLoadOperatorDescriptor primaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits,
-                primaryComparatorFactories, fieldPermutation, 0.7f, dataflowHelperFactory,
-                NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeBulkLoad, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, primaryBtreeBulkLoad, 0);
-
-        spec.addRoot(primaryBtreeBulkLoad);
-        runTest(spec);
-    }
-
-    @Test
-    public void scanPrimaryIndexTest() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        // build dummy tuple containing nothing
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryKeyFieldCount * 2);
-        DataOutput dos = tb.getDataOutput();
-
-        tb.reset();
-        UTF8StringSerializerDeserializer.INSTANCE.serialize("0", dos);
-        tb.addFieldEndOffset();
-
-        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE };
-        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
-        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
-                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
-
-        int[] lowKeyFields = null; // - infinity
-        int[] highKeyFields = null; // + infinity
-
-        BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits,
-                primaryComparatorFactories, lowKeyFields, highKeyFields, true, true, dataflowHelperFactory, false,
-                NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
-
-        ResultSetId rsId = new ResultSetId(1);
-        spec.addResultSetId(rsId);
-
-        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
-                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryBtreeSearchOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), primaryBtreeSearchOp, 0, printer, 0);
-
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-    @AfterClass
-    public static void cleanup() throws Exception {
-        File primary = new File(primaryFileName);
-        primary.deleteOnExit();
-    }
-}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexSearchOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexSearchOperatorTest.java
deleted file mode 100644
index acd3027..0000000
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexSearchOperatorTest.java
+++ /dev/null
@@ -1,214 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.btree;
-
-import java.io.DataOutput;
-import java.io.File;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.dataset.ResultSetId;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.test.support.TestIndexRegistryProvider;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
-import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
-import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
-
-public class BTreePrimaryIndexSearchOperatorTest extends AbstractIntegrationTest {
-    static {
-        TestStorageManagerComponentHolder.init(8192, 20, 20);
-    }
-
-    private IStorageManagerInterface storageManager = new TestStorageManagerInterface();
-    private IIndexRegistryProvider<IIndex> indexRegistryProvider = new TestIndexRegistryProvider();
-    private IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory();
-
-    private final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
-    private final static String sep = System.getProperty("file.separator");
-
-    // field, type and key declarations for primary index
-    private int primaryFieldCount = 6;
-    private ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
-    private int primaryKeyFieldCount = 1;
-    private IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
-
-    private static String primaryBtreeName = "primary" + simpleDateFormat.format(new Date());
-    private static String primaryFileName = System.getProperty("java.io.tmpdir") + sep + primaryBtreeName;
-
-    private IFileSplitProvider primaryBtreeSplitProvider = new ConstantFileSplitProvider(
-            new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(primaryFileName))) });
-
-    private RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
-
-    @Before
-    public void setup() throws Exception {
-        // field, type and key declarations for primary index
-        primaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[2] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[3] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[4] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[5] = UTF8StringPointable.TYPE_TRAITS;
-        primaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
-
-        createPrimaryIndex();
-        loadPrimaryIndexTest();
-    }
-
-    public void createPrimaryIndex() throws Exception {
-        JobSpecification spec = new JobSpecification();
-        TreeIndexCreateOperatorDescriptor primaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec, storageManager,
-                indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories,
-                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryCreateOp, NC1_ID);
-        spec.addRoot(primaryCreateOp);
-        runTest(spec);
-    }
-
-    public void loadPrimaryIndexTest() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
-                "data/tpch0.001/orders-part1.tbl"))) };
-        IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
-        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
-
-        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 1000, new int[] { 0 },
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
-                ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID);
-
-        int[] fieldPermutation = { 0, 1, 2, 4, 5, 7 };
-        TreeIndexBulkLoadOperatorDescriptor primaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits,
-                primaryComparatorFactories, fieldPermutation, 0.7f, dataflowHelperFactory,
-                NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeBulkLoad, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, primaryBtreeBulkLoad, 0);
-
-        spec.addRoot(primaryBtreeBulkLoad);
-        runTest(spec);
-    }
-
-    @Test
-    public void searchPrimaryIndexTest() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        // build tuple containing low and high search key
-        // high key and low key
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryKeyFieldCount * 2);
-        DataOutput dos = tb.getDataOutput();
-
-        tb.reset();
-        // low key
-        UTF8StringSerializerDeserializer.INSTANCE.serialize("100", dos);
-        tb.addFieldEndOffset();
-        // high key
-        UTF8StringSerializerDeserializer.INSTANCE.serialize("200", dos);
-        tb.addFieldEndOffset();
-
-        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE };
-        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
-        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
-                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
-
-        int[] lowKeyFields = { 0 };
-        int[] highKeyFields = { 1 };
-
-        BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits,
-                primaryComparatorFactories, lowKeyFields, highKeyFields, true, true, dataflowHelperFactory, false,
-                NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
-
-        ResultSetId rsId = new ResultSetId(1);
-        spec.addResultSetId(rsId);
-
-        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
-                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryBtreeSearchOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), primaryBtreeSearchOp, 0, printer, 0);
-
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-    @AfterClass
-    public static void cleanup() throws Exception {
-        File primary = new File(primaryFileName);
-        primary.deleteOnExit();
-    }
-}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexStatsOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexStatsOperatorTest.java
deleted file mode 100644
index ca03b16..0000000
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexStatsOperatorTest.java
+++ /dev/null
@@ -1,179 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.btree;
-
-import java.io.File;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.dataset.ResultSetId;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexStatsOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.test.support.TestIndexRegistryProvider;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
-import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
-import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
-
-public class BTreePrimaryIndexStatsOperatorTest extends AbstractIntegrationTest {
-    static {
-        TestStorageManagerComponentHolder.init(8192, 20, 20);
-    }
-
-    private IStorageManagerInterface storageManager = new TestStorageManagerInterface();
-    private IIndexRegistryProvider<IIndex> indexRegistryProvider = new TestIndexRegistryProvider();
-    private IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory();
-
-    private final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
-    private final static String sep = System.getProperty("file.separator");
-
-    // field, type and key declarations for primary index
-    private int primaryFieldCount = 6;
-    private ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
-    private int primaryKeyFieldCount = 1;
-    private IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
-
-    private static String primaryBtreeName = "primary" + simpleDateFormat.format(new Date());
-    private static String primaryFileName = System.getProperty("java.io.tmpdir") + sep + primaryBtreeName;
-
-    private IFileSplitProvider primaryBtreeSplitProvider = new ConstantFileSplitProvider(
-            new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(primaryFileName))) });
-
-    @Before
-    public void setup() throws Exception {
-        // field, type and key declarations for primary index
-        primaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[2] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[3] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[4] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[5] = UTF8StringPointable.TYPE_TRAITS;
-        primaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
-
-        createPrimaryIndex();
-        loadPrimaryIndexTest();
-    }
-
-    public void createPrimaryIndex() throws Exception {
-        JobSpecification spec = new JobSpecification();
-        TreeIndexCreateOperatorDescriptor primaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec, storageManager,
-                indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories,
-                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryCreateOp, NC1_ID);
-        spec.addRoot(primaryCreateOp);
-        runTest(spec);
-    }
-
-    public void loadPrimaryIndexTest() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
-                "data/tpch0.001/orders-part1.tbl"))) };
-        IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
-        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
-
-        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 1000, new int[] { 0 },
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
-                ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID);
-
-        int[] fieldPermutation = { 0, 1, 2, 4, 5, 7 };
-        TreeIndexBulkLoadOperatorDescriptor primaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits,
-                primaryComparatorFactories, fieldPermutation, 0.7f, dataflowHelperFactory,
-                NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeBulkLoad, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, primaryBtreeBulkLoad, 0);
-
-        spec.addRoot(primaryBtreeBulkLoad);
-        runTest(spec);
-    }
-
-    @Test
-    public void showPrimaryIndexStats() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        TreeIndexStatsOperatorDescriptor primaryStatsOp = new TreeIndexStatsOperatorDescriptor(spec, storageManager,
-                indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories,
-                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryStatsOp, NC1_ID);
-
-        ResultSetId rsId = new ResultSetId(1);
-        spec.addResultSetId(rsId);
-
-        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
-                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), primaryStatsOp, 0, printer, 0);
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-    @AfterClass
-    public static void cleanup() throws Exception {
-        File primary = new File(primaryFileName);
-        primary.deleteOnExit();
-    }
-}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexSearchOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexSearchOperatorTest.java
deleted file mode 100644
index c9ee118..0000000
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexSearchOperatorTest.java
+++ /dev/null
@@ -1,314 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.btree;
-
-import java.io.DataOutput;
-import java.io.File;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.dataset.ResultSetId;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.test.support.TestIndexRegistryProvider;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
-import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
-import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
-
-public class BTreeSecondaryIndexSearchOperatorTest extends AbstractIntegrationTest {
-    static {
-        TestStorageManagerComponentHolder.init(8192, 20, 20);
-    }
-
-    private IStorageManagerInterface storageManager = new TestStorageManagerInterface();
-    private IIndexRegistryProvider<IIndex> indexRegistryProvider = new TestIndexRegistryProvider();
-    private IIndexDataflowHelperFactory dataflowHelperFactory = new BTreeDataflowHelperFactory();
-
-    private final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
-    private final static String sep = System.getProperty("file.separator");
-
-    // field, type and key declarations for primary index
-    private int primaryFieldCount = 6;
-    private ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
-    private int primaryKeyFieldCount = 1;
-    private IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
-
-    private static String primaryBtreeName = "primary" + simpleDateFormat.format(new Date());
-    private static String primaryFileName = System.getProperty("java.io.tmpdir") + sep + primaryBtreeName;
-
-    private IFileSplitProvider primaryBtreeSplitProvider = new ConstantFileSplitProvider(
-            new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(primaryFileName))) });
-
-    private RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
-
-    // field, type and key declarations for secondary indexes
-    private int secondaryFieldCount = 2;
-    private ITypeTraits[] secondaryTypeTraits = new ITypeTraits[secondaryFieldCount];
-    private int secondaryKeyFieldCount = 2;
-    private IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[secondaryKeyFieldCount];
-
-    private static String secondaryBtreeName = "secondary" + simpleDateFormat.format(new Date());
-    private static String secondaryFileName = System.getProperty("java.io.tmpdir") + sep + secondaryBtreeName;
-
-    private IFileSplitProvider secondaryBtreeSplitProvider = new ConstantFileSplitProvider(
-            new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(secondaryFileName))) });
-
-    private RecordDescriptor secondaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
-
-    @Before
-    public void setup() throws Exception {
-        // field, type and key declarations for primary index
-        primaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[2] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[3] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[4] = UTF8StringPointable.TYPE_TRAITS;
-        primaryTypeTraits[5] = UTF8StringPointable.TYPE_TRAITS;
-        primaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
-
-        // field, type and key declarations for secondary indexes
-        secondaryTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
-        secondaryTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
-        secondaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
-        secondaryComparatorFactories[1] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
-
-        createPrimaryIndex();
-        loadPrimaryIndexTest();
-        createSecondaryIndex();
-        loadSecondaryIndexTest();
-    }
-
-    public void createPrimaryIndex() throws Exception {
-        JobSpecification spec = new JobSpecification();
-        TreeIndexCreateOperatorDescriptor primaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec, storageManager,
-                indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits, primaryComparatorFactories,
-                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryCreateOp, NC1_ID);
-        spec.addRoot(primaryCreateOp);
-        runTest(spec);
-    }
-
-    public void loadPrimaryIndexTest() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
-                "data/tpch0.001/orders-part1.tbl"))) };
-        IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
-        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
-
-        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 1000, new int[] { 0 },
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
-                ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID);
-
-        int[] fieldPermutation = { 0, 1, 2, 4, 5, 7 };
-        TreeIndexBulkLoadOperatorDescriptor primaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits,
-                primaryComparatorFactories, fieldPermutation, 0.7f, dataflowHelperFactory,
-                NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeBulkLoad, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, primaryBtreeBulkLoad, 0);
-
-        spec.addRoot(primaryBtreeBulkLoad);
-        runTest(spec);
-    }
-
-    public void createSecondaryIndex() throws Exception {
-        JobSpecification spec = new JobSpecification();
-        TreeIndexCreateOperatorDescriptor secondaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec,
-                storageManager, indexRegistryProvider, secondaryBtreeSplitProvider, secondaryTypeTraits,
-                secondaryComparatorFactories, dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryCreateOp, NC1_ID);
-        spec.addRoot(secondaryCreateOp);
-        runTest(spec);
-    }
-
-    public void loadSecondaryIndexTest() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        // build a dummy key tuple (its content is unused: low/high key fields are null, i.e. full primary scan)
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryKeyFieldCount * 2);
-        DataOutput dos = tb.getDataOutput();
-
-        tb.reset();
-        UTF8StringSerializerDeserializer.INSTANCE.serialize("0", dos);
-        tb.addFieldEndOffset();
-
-        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE };
-        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
-        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
-                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
-
-        int[] lowKeyFields = null; // - infinity
-        int[] highKeyFields = null; // + infinity
-
-        // scan primary index
-        BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits,
-                primaryComparatorFactories, lowKeyFields, highKeyFields, true, true, dataflowHelperFactory, false,
-                NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
-
-        // sort based on secondary keys
-        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 1000, new int[] { 3, 0 },
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY),
-                        PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) }, primaryRecDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID);
-
-        // load secondary index
-        int[] fieldPermutation = { 3, 0 };
-        TreeIndexBulkLoadOperatorDescriptor secondaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, indexRegistryProvider, secondaryBtreeSplitProvider, secondaryTypeTraits,
-                secondaryComparatorFactories, fieldPermutation, 0.7f, dataflowHelperFactory,
-                NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryBtreeBulkLoad, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryBtreeSearchOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), primaryBtreeSearchOp, 0, sorter, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, secondaryBtreeBulkLoad, 0);
-
-        spec.addRoot(secondaryBtreeBulkLoad);
-        runTest(spec);
-    }
-
-    @Test
-    public void searchSecondaryIndexTest() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        // build the tuple holding the low and high search keys (only the first
-        // secondary-key field, the date, is constrained by this range)
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(secondaryKeyFieldCount);
-        DataOutput dos = tb.getDataOutput();
-
-        tb.reset();
-        // low key
-        UTF8StringSerializerDeserializer.INSTANCE.serialize("1998-07-21", dos);
-        tb.addFieldEndOffset();
-        // high key
-        UTF8StringSerializerDeserializer.INSTANCE.serialize("2000-10-18", dos);
-        tb.addFieldEndOffset();
-
-        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE };
-        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
-        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
-                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
-
-        int[] secondaryLowKeyFields = { 0 };
-        int[] secondaryHighKeyFields = { 1 };
-
-        // search secondary index
-        BTreeSearchOperatorDescriptor secondaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec,
-                secondaryRecDesc, storageManager, indexRegistryProvider, secondaryBtreeSplitProvider,
-                secondaryTypeTraits, secondaryComparatorFactories, secondaryLowKeyFields, secondaryHighKeyFields, true,
-                true, dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryBtreeSearchOp, NC1_ID);
-
-        int[] primaryLowKeyFields = { 1 }; // second field from the tuples
-        // coming from secondary index
-        int[] primaryHighKeyFields = { 1 }; // second field from the tuples
-        // coming from secondary index
-
-        // search primary index
-        BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                storageManager, indexRegistryProvider, primaryBtreeSplitProvider, primaryTypeTraits,
-                primaryComparatorFactories, primaryLowKeyFields, primaryHighKeyFields, true, true,
-                dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBtreeSearchOp, NC1_ID);
-
-        ResultSetId rsId = new ResultSetId(1);
-        spec.addResultSetId(rsId);
-
-        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
-                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondaryBtreeSearchOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), secondaryBtreeSearchOp, 0, primaryBtreeSearchOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), primaryBtreeSearchOp, 0, printer, 0);
-
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-    @AfterClass
-    public static void cleanup() throws Exception {
-        File primary = new File(primaryFileName);
-        primary.deleteOnExit();
-        File secondary = new File(secondaryFileName);
-        secondary.deleteOnExit();
-    }
-}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractIntegrationTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractIntegrationTest.java
index 8893567..e74e54c 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractIntegrationTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractIntegrationTest.java
@@ -60,6 +60,9 @@
     private static IHyracksClientConnection hcc;
 
     private final List<File> outputFiles;
+    
+    protected static int DEFAULT_MEM_PAGE_SIZE = 32768;
+    protected static int DEFAULT_MEM_NUM_PAGES = 1000;
 
     @Rule
     public TemporaryFolder outputFolder = new TemporaryFolder();
@@ -106,7 +109,6 @@
         nc2.start();
 
         hcc = new HyracksConnection(ccConfig.clientNetIpAddress, ccConfig.clientNetPort);
-        hcc.createApplication("test", null);
         if (LOGGER.isLoggable(Level.INFO)) {
             LOGGER.info("Starting CC in " + ccRoot.getAbsolutePath());
         }
@@ -123,7 +125,7 @@
         if (LOGGER.isLoggable(Level.INFO)) {
             LOGGER.info(spec.toJSON().toString(2));
         }
-        JobId jobId = hcc.startJob("test", spec, EnumSet.of(JobFlag.PROFILE_RUNTIME));
+        JobId jobId = hcc.startJob(spec, EnumSet.of(JobFlag.PROFILE_RUNTIME));
         if (LOGGER.isLoggable(Level.INFO)) {
             LOGGER.info(jobId.toString());
         }
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
index 24d0ef4..d97b7db 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/AbstractMultiNCIntegrationTest.java
@@ -100,7 +100,6 @@
         }
 
         hcc = new HyracksConnection(ccConfig.clientNetIpAddress, ccConfig.clientNetPort);
-        hcc.createApplication("test", null);
         if (LOGGER.isLoggable(Level.INFO)) {
             LOGGER.info("Starting CC in " + ccRoot.getAbsolutePath());
         }
@@ -118,7 +117,7 @@
         if (LOGGER.isLoggable(Level.INFO)) {
             LOGGER.info(spec.toJSON().toString(2));
         }
-        JobId jobId = hcc.startJob("test", spec, EnumSet.of(JobFlag.PROFILE_RUNTIME));
+        JobId jobId = hcc.startJob(spec, EnumSet.of(JobFlag.PROFILE_RUNTIME));
         if (LOGGER.isLoggable(Level.INFO)) {
             LOGGER.info(jobId.toString());
         }
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/InvertedIndexOperatorsTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/InvertedIndexOperatorsTest.java
deleted file mode 100644
index b5a4df8..0000000
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/InvertedIndexOperatorsTest.java
+++ /dev/null
@@ -1,77 +0,0 @@
-package edu.uci.ics.hyracks.tests.invertedindex;
-
-import java.io.File;
-
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.dataset.ResultSetId;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.IntegerParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.invertedindex.dataflow.BinaryTokenizerOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.DelimitedUTF8StringBinaryTokenizerFactory;
-import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizerFactory;
-import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.ITokenFactory;
-import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.UTF8WordTokenFactory;
-import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
-import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
-
-public class InvertedIndexOperatorsTest extends AbstractIntegrationTest {
-
-    @Test
-    public void tokenizerTest() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        FileSplit[] dblpTitleFileSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
-                "data/cleanednumbereddblptitles.txt"))) };
-        IFileSplitProvider dblpTitleSplitProvider = new ConstantFileSplitProvider(dblpTitleFileSplits);
-        RecordDescriptor dblpTitleRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileScanOperatorDescriptor dblpTitleScanner = new FileScanOperatorDescriptor(spec, dblpTitleSplitProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { IntegerParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE }, '|'), dblpTitleRecDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, dblpTitleScanner, NC1_ID);
-
-        RecordDescriptor tokenizerRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
-
-        ITokenFactory tokenFactory = new UTF8WordTokenFactory();
-        IBinaryTokenizerFactory tokenizerFactory = new DelimitedUTF8StringBinaryTokenizerFactory(true, false,
-                tokenFactory);
-        int[] tokenFields = { 1 };
-        int[] projFields = { 0 };
-        BinaryTokenizerOperatorDescriptor binaryTokenizer = new BinaryTokenizerOperatorDescriptor(spec,
-                tokenizerRecDesc, tokenizerFactory, tokenFields, projFields);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, binaryTokenizer, NC1_ID);
-
-        ResultSetId rsId = new ResultSetId(1);
-        spec.addResultSetId(rsId);
-
-        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
-                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), dblpTitleScanner, 0, binaryTokenizer, 0);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), binaryTokenizer, 0, printer, 0);
-
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexSearchOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexSearchOperatorTest.java
deleted file mode 100644
index 92b6e14..0000000
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexSearchOperatorTest.java
+++ /dev/null
@@ -1,214 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.rtree;
-
-import java.io.DataOutput;
-import java.io.File;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.dataset.ResultSetId;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.DoubleParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.test.support.TestIndexRegistryProvider;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
-import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
-import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
-
-public class RTreePrimaryIndexSearchOperatorTest extends AbstractIntegrationTest {
-    static {
-        TestStorageManagerComponentHolder.init(8192, 20, 20);
-    }
-
-    private IStorageManagerInterface storageManager = new TestStorageManagerInterface();
-    private IIndexRegistryProvider<IIndex> indexRegistryProvider = new TestIndexRegistryProvider();
-    private IIndexDataflowHelperFactory dataflowHelperFactory;
-
-    private final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
-    private final static String sep = System.getProperty("file.separator");
-
-    // field, type and key declarations for primary R-tree index
-    private int primaryFieldCount = 5;
-    private int primaryKeyFieldCount = 4;
-    private ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
-    private IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
-
-    private RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-            DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-            DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-            UTF8StringSerializerDeserializer.INSTANCE });
-
-    private static String primaryRTreeName = "primary" + simpleDateFormat.format(new Date());
-    private static String primaryFileName = System.getProperty("java.io.tmpdir") + sep + primaryRTreeName;
-
-    private IFileSplitProvider primaryRTreeSplitProvider = new ConstantFileSplitProvider(
-            new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(primaryFileName))) });
-
-    private IPrimitiveValueProviderFactory[] primaryValueProviderFactories;
-    
-    @Before
-    public void setup() throws Exception {
-        // field, type and key declarations for primary R-tree index
-        primaryTypeTraits[0] = DoublePointable.TYPE_TRAITS;
-        primaryTypeTraits[1] = DoublePointable.TYPE_TRAITS;
-        primaryTypeTraits[2] = DoublePointable.TYPE_TRAITS;
-        primaryTypeTraits[3] = DoublePointable.TYPE_TRAITS;
-        primaryTypeTraits[4] = UTF8StringPointable.TYPE_TRAITS;
-        primaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
-        primaryComparatorFactories[1] = primaryComparatorFactories[0];
-        primaryComparatorFactories[2] = primaryComparatorFactories[0];
-        primaryComparatorFactories[3] = primaryComparatorFactories[0];
-
-        primaryValueProviderFactories = RTreeUtils
-                .createPrimitiveValueProviderFactories(primaryComparatorFactories.length, DoublePointable.FACTORY);
-        dataflowHelperFactory = new RTreeDataflowHelperFactory(primaryValueProviderFactories);
-        
-        createPrimaryIndex();
-        loadPrimaryIndexTest();
-    }
-
-    public void createPrimaryIndex() throws Exception {
-        JobSpecification spec = new JobSpecification();
-        TreeIndexCreateOperatorDescriptor primaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec, storageManager,
-                indexRegistryProvider, primaryRTreeSplitProvider, primaryTypeTraits, primaryComparatorFactories,
-                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryCreateOp, NC1_ID);
-        spec.addRoot(primaryCreateOp);
-        runTest(spec);
-    }
-    
-    public void loadPrimaryIndexTest() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        FileSplit[] objectsSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
-                "data/spatial.txt"))) };
-        IFileSplitProvider objectsSplitProvider = new ConstantFileSplitProvider(objectsSplits);
-        RecordDescriptor objectsDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileScanOperatorDescriptor objScanner = new FileScanOperatorDescriptor(spec, objectsSplitProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { DoubleParserFactory.INSTANCE,
-                        DoubleParserFactory.INSTANCE, DoubleParserFactory.INSTANCE, DoubleParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE }, '|'), objectsDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, objScanner, NC1_ID);
-
-        int[] fieldPermutation = { 0, 1, 2, 3, 4 };
-        TreeIndexBulkLoadOperatorDescriptor primaryRTreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, indexRegistryProvider, primaryRTreeSplitProvider, primaryTypeTraits, primaryComparatorFactories, fieldPermutation, 0.7f,
-                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryRTreeBulkLoad, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), objScanner, 0, primaryRTreeBulkLoad, 0);
-
-        spec.addRoot(primaryRTreeBulkLoad);
-        runTest(spec);
-    }
-
-    @Test
-    public void searchPrimaryIndexTest() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        // build tuple
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryKeyFieldCount);
-        DataOutput dos = tb.getDataOutput();
-
-        tb.reset();
-        DoubleSerializerDeserializer.INSTANCE.serialize(61.2894, dos);
-        tb.addFieldEndOffset();
-        DoubleSerializerDeserializer.INSTANCE.serialize(-149.624, dos);
-        tb.addFieldEndOffset();
-        DoubleSerializerDeserializer.INSTANCE.serialize(61.8894, dos);
-        tb.addFieldEndOffset();
-        DoubleSerializerDeserializer.INSTANCE.serialize(-149.024, dos);
-        tb.addFieldEndOffset();
-
-        ISerializerDeserializer[] keyRecDescSers = { DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE };
-        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
-        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
-                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
-
-        int[] keyFields = { 0, 1, 2, 3 };
-
-        RTreeSearchOperatorDescriptor primaryRTreeSearchOp = new RTreeSearchOperatorDescriptor(spec, primaryRecDesc,
-                storageManager, indexRegistryProvider, primaryRTreeSplitProvider, primaryTypeTraits, primaryComparatorFactories, keyFields,
-                dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryRTreeSearchOp, NC1_ID);
-
-        ResultSetId rsId = new ResultSetId(1);
-        spec.addResultSetId(rsId);
-
-        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
-                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryRTreeSearchOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), primaryRTreeSearchOp, 0, printer, 0);
-
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-    @AfterClass
-    public static void cleanup() throws Exception {
-        File primary = new File(primaryFileName);
-        primary.deleteOnExit();
-    }
-}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexStatsOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexStatsOperatorTest.java
deleted file mode 100644
index 2a00394..0000000
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexStatsOperatorTest.java
+++ /dev/null
@@ -1,198 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.rtree;
-
-import java.io.File;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.dataset.ResultSetId;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.DoubleParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.PlainFileWriterOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexStatsOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.test.support.TestIndexRegistryProvider;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
-import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
-import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
-
-public class RTreePrimaryIndexStatsOperatorTest extends AbstractIntegrationTest {
-    static {
-        TestStorageManagerComponentHolder.init(8192, 20, 20);
-    }
-
-    private IStorageManagerInterface storageManager = new TestStorageManagerInterface();
-    private IIndexRegistryProvider<IIndex> indexRegistryProvider = new TestIndexRegistryProvider();
-    private IIndexDataflowHelperFactory dataflowHelperFactory;
-
-    private final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
-    private final static String sep = System.getProperty("file.separator");
-
-    // field, type and key declarations for primary R-tree index
-    private int primaryFieldCount = 5;
-    private int primaryKeyFieldCount = 4;
-    private ITypeTraits[] primaryTypeTraits = new ITypeTraits[primaryFieldCount];
-    private IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
-
-    private RTreeTypeAwareTupleWriterFactory primaryTupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(
-            primaryTypeTraits);
-
-    private RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-            DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-            DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-            UTF8StringSerializerDeserializer.INSTANCE });
-
-    private ITreeIndexFrameFactory primaryInteriorFrameFactory;
-    private ITreeIndexFrameFactory primaryLeafFrameFactory;
-
-    private static String primaryRTreeName = "primary" + simpleDateFormat.format(new Date());
-    private static String primaryFileName = System.getProperty("java.io.tmpdir") + sep + primaryRTreeName;
-
-    private IFileSplitProvider primaryRTreeSplitProvider = new ConstantFileSplitProvider(
-            new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(primaryFileName))) });
-
-    private IPrimitiveValueProviderFactory[] primaryValueProviderFactories;
-    
-    @Before
-    public void setup() throws Exception {
-        // field, type and key declarations for primary R-tree index
-        primaryTypeTraits[0] = DoublePointable.TYPE_TRAITS;
-        primaryTypeTraits[1] = DoublePointable.TYPE_TRAITS;
-        primaryTypeTraits[2] = DoublePointable.TYPE_TRAITS;
-        primaryTypeTraits[3] = DoublePointable.TYPE_TRAITS;
-        primaryTypeTraits[4] = UTF8StringPointable.TYPE_TRAITS; // field 4 is the UTF8 payload (see primaryRecDesc)
-        primaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
-        primaryComparatorFactories[1] = primaryComparatorFactories[0];
-        primaryComparatorFactories[2] = primaryComparatorFactories[0];
-        primaryComparatorFactories[3] = primaryComparatorFactories[0];
-
-        primaryValueProviderFactories = RTreeUtils
-                .createPrimitiveValueProviderFactories(primaryComparatorFactories.length, DoublePointable.FACTORY);
-        dataflowHelperFactory = new RTreeDataflowHelperFactory(primaryValueProviderFactories);
-        
-        primaryInteriorFrameFactory = new RTreeNSMInteriorFrameFactory(primaryTupleWriterFactory,
-                primaryValueProviderFactories);
-        primaryLeafFrameFactory = new RTreeNSMLeafFrameFactory(primaryTupleWriterFactory, primaryValueProviderFactories);
-
-        createPrimaryIndex();
-        loadPrimaryIndexTest();
-    }
-
-    public void createPrimaryIndex() throws Exception {
-        JobSpecification spec = new JobSpecification();
-        TreeIndexCreateOperatorDescriptor primaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec, storageManager,
-                indexRegistryProvider, primaryRTreeSplitProvider, primaryTypeTraits, primaryComparatorFactories,
-                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryCreateOp, NC1_ID);
-        spec.addRoot(primaryCreateOp);
-        runTest(spec);
-    }
-    
-    public void loadPrimaryIndexTest() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        FileSplit[] objectsSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
-                "data/spatial.txt"))) };
-        IFileSplitProvider objectsSplitProvider = new ConstantFileSplitProvider(objectsSplits);
-        RecordDescriptor objectsDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE });
-
-        FileScanOperatorDescriptor objScanner = new FileScanOperatorDescriptor(spec, objectsSplitProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { DoubleParserFactory.INSTANCE,
-                        DoubleParserFactory.INSTANCE, DoubleParserFactory.INSTANCE, DoubleParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE }, '|'), objectsDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, objScanner, NC1_ID);
-
-        int[] fieldPermutation = { 0, 1, 2, 3, 4 };
-        TreeIndexBulkLoadOperatorDescriptor primaryRTreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, indexRegistryProvider, primaryRTreeSplitProvider, primaryTypeTraits, primaryComparatorFactories, fieldPermutation, 0.7f,
-                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryRTreeBulkLoad, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), objScanner, 0, primaryRTreeBulkLoad, 0);
-
-        spec.addRoot(primaryRTreeBulkLoad);
-        runTest(spec);
-    }
-
-    @Test
-    public void showPrimaryIndexStats() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        TreeIndexStatsOperatorDescriptor primaryStatsOp = new TreeIndexStatsOperatorDescriptor(spec, storageManager,
-                indexRegistryProvider, primaryRTreeSplitProvider, 
-                primaryTypeTraits, primaryComparatorFactories, dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryStatsOp, NC1_ID);
-
-        ResultSetId rsId = new ResultSetId(1);
-        spec.addResultSetId(rsId);
-
-        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
-                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), primaryStatsOp, 0, printer, 0);
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-    @AfterClass
-    public static void cleanup() throws Exception {
-        File primary = new File(primaryFileName);
-        primary.deleteOnExit();
-    }
-}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreeSecondaryIndexSearchOperatorTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreeSecondaryIndexSearchOperatorTest.java
deleted file mode 100644
index 1d86037..0000000
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreeSecondaryIndexSearchOperatorTest.java
+++ /dev/null
@@ -1,324 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.tests.rtree;
-
-import java.io.DataOutput;
-import java.io.File;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-
-import org.junit.AfterClass;
-import org.junit.Before;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.dataset.ResultSetId;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.DoubleParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
-import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.result.ResultWriterOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndex;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexCreateOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallbackProvider;
-import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeDataflowHelperFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.test.support.TestIndexRegistryProvider;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
-import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
-import edu.uci.ics.hyracks.tests.util.ResultSerializerFactoryProvider;
-
-public class RTreeSecondaryIndexSearchOperatorTest extends AbstractIntegrationTest {
-    static {
-        TestStorageManagerComponentHolder.init(8192, 20, 20);
-    }
-
-    private IStorageManagerInterface storageManager = new TestStorageManagerInterface();
-    private IIndexRegistryProvider<IIndex> indexRegistryProvider = new TestIndexRegistryProvider();
-    private IIndexDataflowHelperFactory dataflowHelperFactory;
-    private IIndexDataflowHelperFactory btreeDataflowHelperFactory = new BTreeDataflowHelperFactory();
-
-    private final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
-    private final static String sep = System.getProperty("file.separator");
-
-    // field, type and key declarations for primary B-tree index
-    private int primaryBTreeFieldCount = 10;
-    private ITypeTraits[] primaryBTreeTypeTraits = new ITypeTraits[primaryBTreeFieldCount];
-    private int primaryBTreeKeyFieldCount = 1;
-    private IBinaryComparatorFactory[] primaryBTreeComparatorFactories = new IBinaryComparatorFactory[primaryBTreeKeyFieldCount];
-
-    private static String primaryBTreeName = "primaryBTree" + simpleDateFormat.format(new Date());
-    private static String primaryBTreeFileName = System.getProperty("java.io.tmpdir") + sep + primaryBTreeName;
-
-    private IFileSplitProvider primaryBTreeSplitProvider = new ConstantFileSplitProvider(
-            new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(primaryBTreeFileName))) });
-
-    private RecordDescriptor primaryBTreeRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-            UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-            DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-            DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE, });
-
-    // field, type and key declarations for secondary indexes
-    private int secondaryFieldCount = 5;
-    private ITypeTraits[] secondaryTypeTraits = new ITypeTraits[secondaryFieldCount];
-    private int secondaryKeyFieldCount = 4;
-    private IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[secondaryKeyFieldCount];
-
-    private static String secondaryRTreeName = "secondary" + simpleDateFormat.format(new Date());
-    private static String secondaryFileName = System.getProperty("java.io.tmpdir") + sep + secondaryRTreeName;
-
-    private IFileSplitProvider secondaryRTreeSplitProvider = new ConstantFileSplitProvider(
-            new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(secondaryFileName))) });
-
-    private RecordDescriptor secondaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-            DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-            DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-            UTF8StringSerializerDeserializer.INSTANCE });
-
-    private IPrimitiveValueProviderFactory[] secondaryValueProviderFactories;
-    
-    @Before
-    public void setup() throws Exception {
-        // field, type and key declarations for primary B-tree index
-        primaryBTreeTypeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
-        primaryBTreeTypeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
-        primaryBTreeTypeTraits[2] = UTF8StringPointable.TYPE_TRAITS;
-        primaryBTreeTypeTraits[3] = UTF8StringPointable.TYPE_TRAITS;
-        primaryBTreeTypeTraits[4] = UTF8StringPointable.TYPE_TRAITS;
-        primaryBTreeTypeTraits[5] = UTF8StringPointable.TYPE_TRAITS;
-        primaryBTreeTypeTraits[6] = DoublePointable.TYPE_TRAITS;
-        primaryBTreeTypeTraits[7] = DoublePointable.TYPE_TRAITS;
-        primaryBTreeTypeTraits[8] = DoublePointable.TYPE_TRAITS;
-        primaryBTreeTypeTraits[9] = DoublePointable.TYPE_TRAITS;
-        primaryBTreeComparatorFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
-
-        // field, type and key declarations for secondary indexes
-        secondaryTypeTraits[0] = DoublePointable.TYPE_TRAITS;
-        secondaryTypeTraits[1] = DoublePointable.TYPE_TRAITS;
-        secondaryTypeTraits[2] = DoublePointable.TYPE_TRAITS;
-        secondaryTypeTraits[3] = DoublePointable.TYPE_TRAITS;
-        secondaryTypeTraits[4] = UTF8StringPointable.TYPE_TRAITS;
-        secondaryComparatorFactories[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
-        secondaryComparatorFactories[1] = secondaryComparatorFactories[0];
-        secondaryComparatorFactories[2] = secondaryComparatorFactories[0];
-        secondaryComparatorFactories[3] = secondaryComparatorFactories[0];
-
-        secondaryValueProviderFactories = RTreeUtils
-                .createPrimitiveValueProviderFactories(secondaryComparatorFactories.length, DoublePointable.FACTORY);
-
-        dataflowHelperFactory = new RTreeDataflowHelperFactory(secondaryValueProviderFactories);
-        
-        createPrimaryIndex();
-        loadPrimaryBTreeIndexTest();
-        createSecondaryIndex();
-        loadSecondaryIndexTest();
-    }
-
-    public void createPrimaryIndex() throws Exception {
-        JobSpecification spec = new JobSpecification();
-        TreeIndexCreateOperatorDescriptor primaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec, storageManager,
-                indexRegistryProvider, primaryBTreeSplitProvider, primaryBTreeTypeTraits,
-                primaryBTreeComparatorFactories, btreeDataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryCreateOp, NC1_ID);
-        spec.addRoot(primaryCreateOp);
-        runTest(spec);
-    }
-    
-    public void loadPrimaryBTreeIndexTest() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
-                "data/orders-with-locations.txt"))) };
-        IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
-        RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE });
-
-        FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
-                new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
-                        DoubleParserFactory.INSTANCE, DoubleParserFactory.INSTANCE, DoubleParserFactory.INSTANCE,
-                        DoubleParserFactory.INSTANCE }, '|'), ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
-
-        ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 1000, new int[] { 0 },
-                new IBinaryComparatorFactory[] { PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY) },
-                ordersDesc);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID);
-
-        int[] fieldPermutation = { 0, 1, 2, 4, 5, 7, 9, 10, 11, 12 };
-        TreeIndexBulkLoadOperatorDescriptor primaryBTreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, indexRegistryProvider, primaryBTreeSplitProvider, primaryBTreeTypeTraits, primaryBTreeComparatorFactories,
-                fieldPermutation, 0.7f, btreeDataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBTreeBulkLoad, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, primaryBTreeBulkLoad, 0);
-
-        spec.addRoot(primaryBTreeBulkLoad);
-        runTest(spec);
-    }
-
-    public void createSecondaryIndex() throws Exception {
-        JobSpecification spec = new JobSpecification();
-        TreeIndexCreateOperatorDescriptor primaryCreateOp = new TreeIndexCreateOperatorDescriptor(spec, storageManager,
-                indexRegistryProvider, secondaryRTreeSplitProvider, secondaryTypeTraits, secondaryComparatorFactories,
-                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryCreateOp, NC1_ID);
-        spec.addRoot(primaryCreateOp);
-        runTest(spec);
-    }
-    
-    public void loadSecondaryIndexTest() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        // build dummy tuple containing nothing
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryBTreeKeyFieldCount * 2);
-        DataOutput dos = tb.getDataOutput();
-
-        tb.reset();
-        UTF8StringSerializerDeserializer.INSTANCE.serialize("0", dos);
-        tb.addFieldEndOffset();
-
-        ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE };
-        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
-        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
-                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
-
-        int[] lowKeyFields = null; // - infinity
-        int[] highKeyFields = null; // + infinity
-
-        // scan primary index
-        BTreeSearchOperatorDescriptor primaryBTreeSearchOp = new BTreeSearchOperatorDescriptor(spec,
-                primaryBTreeRecDesc, storageManager, indexRegistryProvider, primaryBTreeSplitProvider,
-                primaryBTreeTypeTraits, primaryBTreeComparatorFactories, lowKeyFields, highKeyFields, 
-                true, true, btreeDataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBTreeSearchOp, NC1_ID);
-
-        // load secondary index
-        int[] fieldPermutation = { 6, 7, 8, 9, 0 };
-        TreeIndexBulkLoadOperatorDescriptor secondaryRTreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
-                storageManager, indexRegistryProvider, secondaryRTreeSplitProvider, secondaryTypeTraits, secondaryComparatorFactories, fieldPermutation, 0.7f,
-                dataflowHelperFactory, NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryRTreeBulkLoad, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryBTreeSearchOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), primaryBTreeSearchOp, 0, secondaryRTreeBulkLoad, 0);
-
-        spec.addRoot(secondaryRTreeBulkLoad);
-        runTest(spec);
-    }
-
-    @Test
-    public void searchSecondaryIndexTest() throws Exception {
-        JobSpecification spec = new JobSpecification();
-
-        // build tuple
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(secondaryKeyFieldCount);
-        DataOutput dos = tb.getDataOutput();
-
-        tb.reset();
-        DoubleSerializerDeserializer.INSTANCE.serialize(61.2894, dos);
-        tb.addFieldEndOffset();
-        DoubleSerializerDeserializer.INSTANCE.serialize(-149.624, dos);
-        tb.addFieldEndOffset();
-        DoubleSerializerDeserializer.INSTANCE.serialize(61.8894, dos);
-        tb.addFieldEndOffset();
-        DoubleSerializerDeserializer.INSTANCE.serialize(-149.024, dos);
-        tb.addFieldEndOffset();
-
-        ISerializerDeserializer[] keyRecDescSers = { DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE };
-        RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
-        ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
-                keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
-
-        int[] keyFields = { 0, 1, 2, 3 };
-
-        RTreeSearchOperatorDescriptor secondaryRTreeSearchOp = new RTreeSearchOperatorDescriptor(spec,
-                secondaryRecDesc, storageManager, indexRegistryProvider, secondaryRTreeSplitProvider,
-                secondaryTypeTraits, secondaryComparatorFactories, keyFields, dataflowHelperFactory, false, NoOpOperationCallbackProvider.INSTANCE);
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryRTreeSearchOp, NC1_ID);
-
-        ResultSetId rsId = new ResultSetId(1);
-        spec.addResultSetId(rsId);
-
-        IOperatorDescriptor printer = new ResultWriterOperatorDescriptor(spec, rsId, true,
-                ResultSerializerFactoryProvider.INSTANCE.getResultSerializerFactoryProvider());
-        PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
-
-        spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondaryRTreeSearchOp, 0);
-        spec.connect(new OneToOneConnectorDescriptor(spec), secondaryRTreeSearchOp, 0, printer, 0);
-
-        spec.addRoot(printer);
-        runTest(spec);
-    }
-
-    @AfterClass
-    public static void cleanup() throws Exception {
-        File primaryBTree = new File(primaryBTreeFileName);
-        primaryBTree.deleteOnExit();
-
-        File secondary = new File(secondaryFileName);
-        secondary.deleteOnExit();
-    }
-}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/text-example/pom.xml b/hyracks/hyracks-examples/text-example/pom.xml
index a183835..d370f828 100644
--- a/hyracks/hyracks-examples/text-example/pom.xml
+++ b/hyracks/hyracks-examples/text-example/pom.xml
@@ -1,6 +1,5 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
   <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks.examples</groupId>
   <artifactId>text-example</artifactId>
   <packaging>pom</packaging>
   <name>text-example</name>
@@ -14,6 +13,6 @@
   <modules>
     <module>texthelper</module>
     <module>textclient</module>
-    <module>textapp</module>
+    <module>textserver</module>
   </modules>
 </project>
diff --git a/hyracks/hyracks-examples/text-example/textapp/src/main/assembly/app-assembly.xml b/hyracks/hyracks-examples/text-example/textapp/src/main/assembly/app-assembly.xml
deleted file mode 100644
index 43ace6c..0000000
--- a/hyracks/hyracks-examples/text-example/textapp/src/main/assembly/app-assembly.xml
+++ /dev/null
@@ -1,13 +0,0 @@
-<assembly>
-  <id>app-assembly</id>
-  <formats>
-    <format>zip</format>
-  </formats>
-  <includeBaseDirectory>false</includeBaseDirectory>
-  <fileSets>
-    <fileSet>
-      <directory>target/application/lib</directory>
-      <outputDirectory>lib</outputDirectory>
-    </fileSet>
-  </fileSets>
-</assembly>
diff --git a/hyracks/hyracks-examples/text-example/textclient/pom.xml b/hyracks/hyracks-examples/text-example/textclient/pom.xml
index 633f480..1e213a3 100644
--- a/hyracks/hyracks-examples/text-example/textclient/pom.xml
+++ b/hyracks/hyracks-examples/text-example/textclient/pom.xml
@@ -1,11 +1,10 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
   <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks.examples.text</groupId>
   <artifactId>textclient</artifactId>
   <name>textclient</name>
 
   <parent>
-    <groupId>edu.uci.ics.hyracks.examples</groupId>
+    <groupId>edu.uci.ics.hyracks</groupId>
     <artifactId>text-example</artifactId>
     <version>0.2.4-SNAPSHOT</version>
   </parent>
@@ -18,7 +17,7 @@
   		<scope>compile</scope>
   	</dependency>
   	<dependency>
-  		<groupId>edu.uci.ics.hyracks.examples.text</groupId>
+  		<groupId>edu.uci.ics.hyracks</groupId>
   		<artifactId>texthelper</artifactId>
   		<version>0.2.4-SNAPSHOT</version>
   		<type>jar</type>
diff --git a/hyracks/hyracks-examples/text-example/textclient/src/main/java/edu/uci/ics/hyracks/examples/text/client/ExternalGroupClient.java b/hyracks/hyracks-examples/text-example/textclient/src/main/java/edu/uci/ics/hyracks/examples/text/client/ExternalGroupClient.java
index 943f232..18813a7 100644
--- a/hyracks/hyracks-examples/text-example/textclient/src/main/java/edu/uci/ics/hyracks/examples/text/client/ExternalGroupClient.java
+++ b/hyracks/hyracks-examples/text-example/textclient/src/main/java/edu/uci/ics/hyracks/examples/text/client/ExternalGroupClient.java
@@ -74,9 +74,6 @@
         @Option(name = "-port", usage = "Hyracks Cluster Controller Port (default: 1098)")
         public int port = 1098;
 
-        @Option(name = "-app", usage = "Hyracks Application name", required = true)
-        public String app;
-
         @Option(name = "-infile-splits", usage = "Comma separated list of file-splits for the input. A file-split is <node-name>:<path>", required = true)
         public String inFileSplits;
 
@@ -122,7 +119,7 @@
 
             System.out.print(i + "\t" + (System.currentTimeMillis() - start));
             start = System.currentTimeMillis();
-            JobId jobId = hcc.startJob(options.app, job);
+            JobId jobId = hcc.startJob(job);
             hcc.waitForCompletion(jobId);
             System.out.println("\t" + (System.currentTimeMillis() - start));
         }
diff --git a/hyracks/hyracks-examples/text-example/textclient/src/main/java/edu/uci/ics/hyracks/examples/text/client/WordCountMain.java b/hyracks/hyracks-examples/text-example/textclient/src/main/java/edu/uci/ics/hyracks/examples/text/client/WordCountMain.java
index 31019ab..5b6fad9 100644
--- a/hyracks/hyracks-examples/text-example/textclient/src/main/java/edu/uci/ics/hyracks/examples/text/client/WordCountMain.java
+++ b/hyracks/hyracks-examples/text-example/textclient/src/main/java/edu/uci/ics/hyracks/examples/text/client/WordCountMain.java
@@ -65,9 +65,6 @@
         @Option(name = "-port", usage = "Hyracks Cluster Controller Port (default: 1098)")
         public int port = 1098;
 
-        @Option(name = "-app", usage = "Hyracks Application name", required = true)
-        public String app;
-
         @Option(name = "-infile-splits", usage = "Comma separated list of file-splits for the input. A file-split is <node-name>:<path>", required = true)
         public String inFileSplits;
 
@@ -101,8 +98,8 @@
                 options.algo, options.htSize, options.sbSize, options.format);
 
         long start = System.currentTimeMillis();
-        JobId jobId = hcc.startJob(options.app, job, options.runtimeProfiling ? EnumSet.of(JobFlag.PROFILE_RUNTIME)
-                : EnumSet.noneOf(JobFlag.class));
+        JobId jobId = hcc.startJob(job,
+                options.runtimeProfiling ? EnumSet.of(JobFlag.PROFILE_RUNTIME) : EnumSet.noneOf(JobFlag.class));
         hcc.waitForCompletion(jobId);
         long end = System.currentTimeMillis();
         System.err.println(start + " " + end + " " + (end - start));
diff --git a/hyracks/hyracks-examples/text-example/texthelper/pom.xml b/hyracks/hyracks-examples/text-example/texthelper/pom.xml
index 5cf5da3..2adb892 100644
--- a/hyracks/hyracks-examples/text-example/texthelper/pom.xml
+++ b/hyracks/hyracks-examples/text-example/texthelper/pom.xml
@@ -1,11 +1,10 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
   <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks.examples.text</groupId>
   <artifactId>texthelper</artifactId>
   <name>texthelper</name>
 
   <parent>
-    <groupId>edu.uci.ics.hyracks.examples</groupId>
+    <groupId>edu.uci.ics.hyracks</groupId>
     <artifactId>text-example</artifactId>
     <version>0.2.4-SNAPSHOT</version>
   </parent>
diff --git a/hyracks/hyracks-examples/text-example/textapp/data/file1.txt b/hyracks/hyracks-examples/text-example/textserver/data/file1.txt
similarity index 100%
rename from hyracks/hyracks-examples/text-example/textapp/data/file1.txt
rename to hyracks/hyracks-examples/text-example/textserver/data/file1.txt
diff --git a/hyracks/hyracks-examples/text-example/textapp/data/file2.txt b/hyracks/hyracks-examples/text-example/textserver/data/file2.txt
similarity index 100%
rename from hyracks/hyracks-examples/text-example/textapp/data/file2.txt
rename to hyracks/hyracks-examples/text-example/textserver/data/file2.txt
diff --git a/hyracks/hyracks-examples/text-example/textapp/pom.xml b/hyracks/hyracks-examples/text-example/textserver/pom.xml
similarity index 61%
rename from hyracks/hyracks-examples/text-example/textapp/pom.xml
rename to hyracks/hyracks-examples/text-example/textserver/pom.xml
index 5927e83..79d9ab0 100644
--- a/hyracks/hyracks-examples/text-example/textapp/pom.xml
+++ b/hyracks/hyracks-examples/text-example/textserver/pom.xml
@@ -1,59 +1,41 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
   <modelVersion>4.0.0</modelVersion>
   <groupId>edu.uci.ics.hyracks.examples.text</groupId>
-  <artifactId>textapp</artifactId>
-  <name>textapp</name>
+  <artifactId>textserver</artifactId>
+  <name>textserver</name>
 
   <parent>
-    <groupId>edu.uci.ics.hyracks.examples</groupId>
+    <groupId>edu.uci.ics.hyracks</groupId>
     <artifactId>text-example</artifactId>
     <version>0.2.4-SNAPSHOT</version>
   </parent>
 
   <build>
-    <pluginManagement>
-      <plugins>
-        <plugin>
-          <groupId>org.eclipse.m2e</groupId>
-          <artifactId>lifecycle-mapping</artifactId>
-          <version>1.0.0</version>
-          <configuration>
-            <lifecycleMappingMetadata>
-              <pluginExecutions>
-                <pluginExecution>
-                  <pluginExecutionFilter>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-dependency-plugin</artifactId>
-                    <versionRange>[1.0.0,)</versionRange>
-                    <goals>
-                      <goal>copy-dependencies</goal>
-                    </goals>
-                  </pluginExecutionFilter>
-                  <action>
-                    <ignore />
-                  </action>
-                </pluginExecution>
-              </pluginExecutions>
-            </lifecycleMappingMetadata>
-          </configuration>
-        </plugin>
-      </plugins>
-	</pluginManagement>
-  
     <plugins>
       <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-dependency-plugin</artifactId>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>appassembler-maven-plugin</artifactId>
+        <version>1.3</version>
         <executions>
           <execution>
-            <id>copy-dependencies</id>
+            <configuration>
+              <programs>
+                <program>
+                  <mainClass>edu.uci.ics.hyracks.control.cc.CCDriver</mainClass>
+                  <name>hyrackscc</name>
+                </program>
+                <program>
+                  <mainClass>edu.uci.ics.hyracks.control.nc.NCDriver</mainClass>
+                  <name>hyracksnc</name>
+                </program>
+              </programs>
+              <repositoryLayout>flat</repositoryLayout>
+              <repositoryName>lib</repositoryName>
+            </configuration>
             <phase>package</phase>
             <goals>
-              <goal>copy-dependencies</goal>
+              <goal>assemble</goal>
             </goals>
-            <configuration>
-              <outputDirectory>target/application/lib</outputDirectory>
-            </configuration>
           </execution>
         </executions>
       </plugin>
@@ -64,7 +46,7 @@
           <execution>
             <configuration>
               <descriptors>
-                <descriptor>src/main/assembly/app-assembly.xml</descriptor>
+                <descriptor>src/main/assembly/binary-assembly.xml</descriptor>
               </descriptors>
             </configuration>
             <phase>package</phase>
@@ -79,8 +61,7 @@
       	<artifactId>hyracks-virtualcluster-maven-plugin</artifactId>
       	<version>0.2.4-SNAPSHOT</version>
         <configuration>
-          <hyracksServerHome>${basedir}/../../../hyracks-server/target/hyracks-server-${project.version}-binary-assembly</hyracksServerHome>
-          <hyracksCLIHome>${basedir}/../../../hyracks-cli/target/hyracks-cli-${project.version}-binary-assembly</hyracksCLIHome>
+          <hyracksServerHome>${basedir}/target/textserver-${project.version}-binary-assembly</hyracksServerHome>
           <jvmOptions>${jvm.extraargs}</jvmOptions>
         </configuration>
         <executions>
@@ -116,18 +97,6 @@
             </configuration>
           </execution>
           <execution>
-            <id>deploy-app</id>
-            <phase>pre-integration-test</phase>
-            <goals>
-              <goal>deploy-app</goal>
-            </goals>
-            <configuration>
-              <ccHost>localhost</ccHost>
-              <appName>text</appName>
-              <harFile>${project.build.directory}/textapp-${project.version}-app-assembly.zip</harFile>
-            </configuration>
-          </execution>
-          <execution>
             <id>stop-services</id>
             <phase>post-integration-test</phase>
             <goals>
@@ -141,9 +110,8 @@
       	<artifactId>maven-compiler-plugin</artifactId>
       	<version>2.0.2</version>
         <configuration>
-          <source>1.7</source>
-          <target>1.7</target>
-          <fork>true</fork>
+          <source>1.6</source>
+          <target>1.6</target>
         </configuration>
       </plugin>
       <plugin>
@@ -164,13 +132,27 @@
   </build>
   <dependencies>
   	<dependency>
-  		<groupId>edu.uci.ics.hyracks.examples.text</groupId>
+  		<groupId>edu.uci.ics.hyracks</groupId>
   		<artifactId>texthelper</artifactId>
   		<version>0.2.4-SNAPSHOT</version>
   		<scope>compile</scope>
   	</dependency>
   	<dependency>
-  		<groupId>edu.uci.ics.hyracks.examples.text</groupId>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-control-cc</artifactId>
+  		<version>0.2.4-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-control-nc</artifactId>
+  		<version>0.2.4-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
   		<artifactId>textclient</artifactId>
   		<version>0.2.4-SNAPSHOT</version>
   		<type>jar</type>
diff --git a/hyracks/hyracks-examples/text-example/textserver/src/main/assembly/binary-assembly.xml b/hyracks/hyracks-examples/text-example/textserver/src/main/assembly/binary-assembly.xml
new file mode 100644
index 0000000..cd598d9
--- /dev/null
+++ b/hyracks/hyracks-examples/text-example/textserver/src/main/assembly/binary-assembly.xml
@@ -0,0 +1,23 @@
+<assembly>
+  <id>binary-assembly</id>
+  <formats>
+    <format>zip</format>
+    <format>dir</format>
+  </formats>
+  <includeBaseDirectory>false</includeBaseDirectory>
+  <fileSets>
+    <fileSet>
+      <directory>target/appassembler/bin</directory>
+      <outputDirectory>bin</outputDirectory>
+      <fileMode>0755</fileMode>
+    </fileSet>
+    <fileSet>
+      <directory>target/appassembler/lib</directory>
+      <outputDirectory>lib</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>docs</directory>
+      <outputDirectory>docs</outputDirectory>
+    </fileSet>
+  </fileSets>
+</assembly>
diff --git a/hyracks/hyracks-examples/text-example/textapp/src/test/java/edu/uci/ics/hyracks/examples/text/test/WordCountIT.java b/hyracks/hyracks-examples/text-example/textserver/src/test/java/edu/uci/ics/hyracks/examples/text/test/WordCountIT.java
similarity index 90%
rename from hyracks/hyracks-examples/text-example/textapp/src/test/java/edu/uci/ics/hyracks/examples/text/test/WordCountIT.java
rename to hyracks/hyracks-examples/text-example/textserver/src/test/java/edu/uci/ics/hyracks/examples/text/test/WordCountIT.java
index 9659288..76af72d 100644
--- a/hyracks/hyracks-examples/text-example/textapp/src/test/java/edu/uci/ics/hyracks/examples/text/test/WordCountIT.java
+++ b/hyracks/hyracks-examples/text-example/textserver/src/test/java/edu/uci/ics/hyracks/examples/text/test/WordCountIT.java
@@ -10,7 +10,7 @@
     @Test
     public void runWordCount() throws Exception {
         WordCountMain.main(new String[] { "-host", "localhost", "-infile-splits", getInfileSplits(), "-outfile-splits",
-                getOutfileSplits(), "-algo", "-hash", "-app", "text" });
+                getOutfileSplits(), "-algo", "-hash" });
     }
 
     private String getInfileSplits() {
@@ -22,4 +22,4 @@
         return "NC1:" + new File("target/wc1.txt").getAbsolutePath() + ",NC2:"
                 + new File("target/wc2.txt").getAbsolutePath();
     }
-}
\ No newline at end of file
+}
diff --git a/hyracks/hyracks-examples/tpch-example/pom.xml b/hyracks/hyracks-examples/tpch-example/pom.xml
index a36efc9..2ec15d8 100644
--- a/hyracks/hyracks-examples/tpch-example/pom.xml
+++ b/hyracks/hyracks-examples/tpch-example/pom.xml
@@ -1,6 +1,5 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
   <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks.examples</groupId>
   <artifactId>tpch-example</artifactId>
   <packaging>pom</packaging>
   <name>tpch-example</name>
@@ -13,6 +12,6 @@
 
   <modules>
     <module>tpchclient</module>
-    <module>tpchapp</module>
+    <module>tpchserver</module>
   </modules>
 </project>
diff --git a/hyracks/hyracks-examples/tpch-example/tpchapp/pom.xml b/hyracks/hyracks-examples/tpch-example/tpchapp/pom.xml
deleted file mode 100644
index 89689c2..0000000
--- a/hyracks/hyracks-examples/tpch-example/tpchapp/pom.xml
+++ /dev/null
@@ -1,91 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks.examples.tpch</groupId>
-  <artifactId>tpchapp</artifactId>
-  <name>tpchapp</name>
-  <parent>
-    <groupId>edu.uci.ics.hyracks.examples</groupId>
-    <artifactId>tpch-example</artifactId>
-    <version>0.2.4-SNAPSHOT</version>
-  </parent>
-
-  <build>
-    <pluginManagement>
-      <plugins>
-        <plugin>
-          <groupId>org.eclipse.m2e</groupId>
-          <artifactId>lifecycle-mapping</artifactId>
-          <version>1.0.0</version>
-          <configuration>
-            <lifecycleMappingMetadata>
-              <pluginExecutions>
-                <pluginExecution>
-                  <pluginExecutionFilter>
-                    <groupId>org.apache.maven.plugins</groupId>
-                    <artifactId>maven-dependency-plugin</artifactId>
-                    <versionRange>[1.0.0,)</versionRange>
-                    <goals>
-                      <goal>copy-dependencies</goal>
-                    </goals>
-                  </pluginExecutionFilter>
-                  <action>
-                    <ignore />
-                  </action>
-                </pluginExecution>
-              </pluginExecutions>
-            </lifecycleMappingMetadata>
-          </configuration>
-        </plugin>
-      </plugins>
-	</pluginManagement>
-  
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-dependency-plugin</artifactId>
-        <executions>
-          <execution>
-            <id>copy-dependencies</id>
-            <phase>package</phase>
-            <goals>
-              <goal>copy-dependencies</goal>
-            </goals>
-            <configuration>
-              <outputDirectory>target/application/lib</outputDirectory>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <artifactId>maven-assembly-plugin</artifactId>
-        <version>2.2-beta-5</version>
-        <executions>
-          <execution>
-            <configuration>
-              <descriptors>
-                <descriptor>src/main/assembly/app-assembly.xml</descriptor>
-              </descriptors>
-            </configuration>
-            <phase>package</phase>
-            <goals>
-              <goal>attached</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
-  <dependencies>
-    <dependency>
-        <groupId>edu.uci.ics.hyracks</groupId>
-        <artifactId>hyracks-dataflow-std</artifactId>
-        <version>0.2.4-SNAPSHOT</version>
-        <scope>compile</scope>
-    </dependency>
-    <dependency>
-    	<groupId>edu.uci.ics.hyracks</groupId>
-    	<artifactId>hyracks-data-std</artifactId>
-    	<version>0.2.4-SNAPSHOT</version>
-    </dependency>
-  </dependencies>
-</project>
diff --git a/hyracks/hyracks-examples/tpch-example/tpchapp/src/main/assembly/app-assembly.xml b/hyracks/hyracks-examples/tpch-example/tpchapp/src/main/assembly/app-assembly.xml
deleted file mode 100644
index 43ace6c..0000000
--- a/hyracks/hyracks-examples/tpch-example/tpchapp/src/main/assembly/app-assembly.xml
+++ /dev/null
@@ -1,13 +0,0 @@
-<assembly>
-  <id>app-assembly</id>
-  <formats>
-    <format>zip</format>
-  </formats>
-  <includeBaseDirectory>false</includeBaseDirectory>
-  <fileSets>
-    <fileSet>
-      <directory>target/application/lib</directory>
-      <outputDirectory>lib</outputDirectory>
-    </fileSet>
-  </fileSets>
-</assembly>
diff --git a/hyracks/hyracks-examples/tpch-example/tpchclient/pom.xml b/hyracks/hyracks-examples/tpch-example/tpchclient/pom.xml
index d6f1d17..486ed70 100644
--- a/hyracks/hyracks-examples/tpch-example/tpchclient/pom.xml
+++ b/hyracks/hyracks-examples/tpch-example/tpchclient/pom.xml
@@ -1,10 +1,9 @@
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
   <modelVersion>4.0.0</modelVersion>
-  <groupId>edu.uci.ics.hyracks.examples.tpch</groupId>
   <artifactId>tpchclient</artifactId>
   <name>tpchclient</name>
   <parent>
-    <groupId>edu.uci.ics.hyracks.examples</groupId>
+    <groupId>edu.uci.ics.hyracks</groupId>
     <artifactId>tpch-example</artifactId>
     <version>0.2.4-SNAPSHOT</version>
   </parent>
diff --git a/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/edu/uci/ics/hyracks/examples/tpch/client/Main.java b/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/edu/uci/ics/hyracks/examples/tpch/client/Main.java
index 0ad0ff0..6df9ff8 100644
--- a/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/edu/uci/ics/hyracks/examples/tpch/client/Main.java
+++ b/hyracks/hyracks-examples/tpch-example/tpchclient/src/main/java/edu/uci/ics/hyracks/examples/tpch/client/Main.java
@@ -72,9 +72,6 @@
         @Option(name = "-port", usage = "Hyracks Cluster Controller Port (default: 1098)", required = false)
         public int port = 1098;
 
-        @Option(name = "-app", usage = "Hyracks Application name", required = true)
-        public String app;
-
         @Option(name = "-infile-customer-splits", usage = "Comma separated list of file-splits for the CUSTOMER input. A file-split is <node-name>:<path>", required = true)
         public String inFileCustomerSplits;
 
@@ -127,7 +124,7 @@
                 options.graceFactor, options.memSize, options.tableSize, options.hasGroupBy);
 
         long start = System.currentTimeMillis();
-        JobId jobId = hcc.startJob(options.app, job,
+        JobId jobId = hcc.startJob(job,
                 options.profile ? EnumSet.of(JobFlag.PROFILE_RUNTIME) : EnumSet.noneOf(JobFlag.class));
         hcc.waitForCompletion(jobId);
         long end = System.currentTimeMillis();
diff --git a/hyracks/hyracks-examples/tpch-example/tpchserver/pom.xml b/hyracks/hyracks-examples/tpch-example/tpchserver/pom.xml
new file mode 100644
index 0000000..3ec713a
--- /dev/null
+++ b/hyracks/hyracks-examples/tpch-example/tpchserver/pom.xml
@@ -0,0 +1,88 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks.examples.tpch</groupId>
+  <artifactId>tpchserver</artifactId>
+  <name>tpchserver</name>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>tpch-example</artifactId>
+    <version>0.2.4-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>appassembler-maven-plugin</artifactId>
+        <version>1.3</version>
+        <executions>
+          <execution>
+            <configuration>
+              <programs>
+                <program>
+                  <mainClass>edu.uci.ics.hyracks.control.cc.CCDriver</mainClass>
+                  <name>hyrackscc</name>
+                </program>
+                <program>
+                  <mainClass>edu.uci.ics.hyracks.control.nc.NCDriver</mainClass>
+                  <name>hyracksnc</name>
+                </program>
+              </programs>
+              <repositoryLayout>flat</repositoryLayout>
+              <repositoryName>lib</repositoryName>
+            </configuration>
+            <phase>package</phase>
+            <goals>
+              <goal>assemble</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <artifactId>maven-assembly-plugin</artifactId>
+        <version>2.2-beta-5</version>
+        <executions>
+          <execution>
+            <configuration>
+              <descriptors>
+                <descriptor>src/main/assembly/binary-assembly.xml</descriptor>
+              </descriptors>
+            </configuration>
+            <phase>package</phase>
+            <goals>
+              <goal>attached</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+    <dependency>
+        <groupId>edu.uci.ics.hyracks</groupId>
+        <artifactId>hyracks-dataflow-std</artifactId>
+        <version>0.2.4-SNAPSHOT</version>
+        <scope>compile</scope>
+    </dependency>
+    <dependency>
+    	<groupId>edu.uci.ics.hyracks</groupId>
+    	<artifactId>hyracks-data-std</artifactId>
+    	<version>0.2.4-SNAPSHOT</version>
+    </dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-control-cc</artifactId>
+  		<version>0.2.4-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-control-nc</artifactId>
+  		<version>0.2.4-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  </dependencies>
+</project>
diff --git a/hyracks/hyracks-examples/tpch-example/tpchserver/src/main/assembly/binary-assembly.xml b/hyracks/hyracks-examples/tpch-example/tpchserver/src/main/assembly/binary-assembly.xml
new file mode 100644
index 0000000..cd598d9
--- /dev/null
+++ b/hyracks/hyracks-examples/tpch-example/tpchserver/src/main/assembly/binary-assembly.xml
@@ -0,0 +1,23 @@
+<assembly>
+  <id>binary-assembly</id>
+  <formats>
+    <format>zip</format>
+    <format>dir</format>
+  </formats>
+  <includeBaseDirectory>false</includeBaseDirectory>
+  <fileSets>
+    <fileSet>
+      <directory>target/appassembler/bin</directory>
+      <outputDirectory>bin</outputDirectory>
+      <fileMode>0755</fileMode>
+    </fileSet>
+    <fileSet>
+      <directory>target/appassembler/lib</directory>
+      <outputDirectory>lib</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>docs</directory>
+      <outputDirectory>docs</outputDirectory>
+    </fileSet>
+  </fileSets>
+</assembly>