- Added a value provider so the R-tree operators can obtain the actual key values from tuples (see the call-site sketch below).
- Improved argument error handling for the R-tree.
- Formatted the code.
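
Illustrative call-site sketch only (not part of the diff): it shows how callers now
build the IPrimitiveValueProviderFactory array and pass it to the bulk-load operator,
matching the signatures visible in this change. The variable names (keyFieldCount,
rtreeSplitProvider, etc.) are hypothetical stand-ins for the test fixtures.

    // One value-provider factory per R-tree key field, so the operators can
    // extract the actual (double) key values; mirrors the test setup in this diff.
    IPrimitiveValueProviderFactory[] valueProviderFactories =
            new IPrimitiveValueProviderFactory[keyFieldCount];
    for (int i = 0; i < keyFieldCount; i++) {
        valueProviderFactories[i] = DoublePrimitiveValueProviderFactory.INSTANCE;
    }

    // The factories go right after the comparator factories; B-tree call sites
    // pass null for this argument, as the updated examples and tests do.
    TreeIndexBulkLoadOperatorDescriptor rtreeBulkLoad =
            new TreeIndexBulkLoadOperatorDescriptor(spec, storageManager,
                    treeIndexRegistryProvider, rtreeSplitProvider,
                    interiorFrameFactory, leafFrameFactory, typeTraits,
                    comparatorFactories, valueProviderFactories,
                    fieldPermutation, 0.7f, opHelperFactory);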

git-svn-id: https://hyracks.googlecode.com/svn/branches/hyracks_dev_next@551 123451ca-8445-de46-9d55-352943316053
diff --git a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/InsertPipelineExample.java b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/InsertPipelineExample.java
index 6f371c0..05b16c8 100644
--- a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/InsertPipelineExample.java
+++ b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/InsertPipelineExample.java
@@ -156,12 +156,12 @@
         IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
 
         ITreeIndexOpHelperFactory opHelperFactory = new BTreeOpHelperFactory();
-        
+
         // create operator descriptor
-        TreeIndexInsertUpdateDeleteOperatorDescriptor primaryInsert = new TreeIndexInsertUpdateDeleteOperatorDescriptor(spec,
-                recDesc, storageManager, treeIndexRegistryProvider, primarySplitProvider, primaryInteriorFrameFactory,
-                primaryLeafFrameFactory, primaryTypeTraits, primaryComparatorFactories, primaryFieldPermutation,
-                IndexOp.INSERT, opHelperFactory);
+        TreeIndexInsertUpdateDeleteOperatorDescriptor primaryInsert = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, recDesc, storageManager, treeIndexRegistryProvider, primarySplitProvider,
+                primaryInteriorFrameFactory, primaryLeafFrameFactory, primaryTypeTraits, primaryComparatorFactories,
+                null, primaryFieldPermutation, IndexOp.INSERT, opHelperFactory);
         JobHelper.createPartitionConstraint(spec, primaryInsert, splitNCs);
 
         // prepare insertion into secondary index
@@ -187,10 +187,10 @@
         IFileSplitProvider secondarySplitProvider = JobHelper.createFileSplitProvider(splitNCs,
                 options.secondaryBTreeName);
         // create operator descriptor
-        TreeIndexInsertUpdateDeleteOperatorDescriptor secondaryInsert = new TreeIndexInsertUpdateDeleteOperatorDescriptor(spec,
-                recDesc, storageManager, treeIndexRegistryProvider, secondarySplitProvider, secondaryInteriorFrameFactory,
-                secondaryLeafFrameFactory, secondaryTypeTraits, secondaryComparatorFactories,
-                secondaryFieldPermutation, IndexOp.INSERT, opHelperFactory);
+        TreeIndexInsertUpdateDeleteOperatorDescriptor secondaryInsert = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+                spec, recDesc, storageManager, treeIndexRegistryProvider, secondarySplitProvider,
+                secondaryInteriorFrameFactory, secondaryLeafFrameFactory, secondaryTypeTraits,
+                secondaryComparatorFactories, null, secondaryFieldPermutation, IndexOp.INSERT, opHelperFactory);
         JobHelper.createPartitionConstraint(spec, secondaryInsert, splitNCs);
 
         // end the insert pipeline at this sink operator
@@ -216,4 +216,4 @@
 
         return spec;
     }
-}
+}
\ No newline at end of file
diff --git a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
index f7942eb..8019080 100644
--- a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
+++ b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
@@ -154,12 +154,12 @@
         // tuple
         int[] fieldPermutation = { 2, 1, 3, 4 }; // map field 2 of input tuple
                                                  // to field 0 of B-Tree tuple,
-                                                 // etc.                
+                                                 // etc.
         IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.btreeName);
         ITreeIndexOpHelperFactory opHelperFactory = new BTreeOpHelperFactory();
-        TreeIndexBulkLoadOperatorDescriptor btreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec, storageManager,
-                treeIndexRegistryProvider, btreeSplitProvider, interiorFrameFactory, leafFrameFactory, typeTraits,
-                comparatorFactories, fieldPermutation, 0.7f, opHelperFactory);
+        TreeIndexBulkLoadOperatorDescriptor btreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
+                storageManager, treeIndexRegistryProvider, btreeSplitProvider, interiorFrameFactory, leafFrameFactory,
+                typeTraits, comparatorFactories, null, fieldPermutation, 0.7f, opHelperFactory);
         JobHelper.createPartitionConstraint(spec, btreeBulkLoad, splitNCs);
 
         // distribute the records from the datagen via hashing to the bulk load
diff --git a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexEnlistFilesExample.java b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexEnlistFilesExample.java
index cae9660..b75f64d 100644
--- a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexEnlistFilesExample.java
+++ b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexEnlistFilesExample.java
@@ -110,9 +110,9 @@
 
         IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.btreeName);
         ITreeIndexOpHelperFactory opHelperFactory = new BTreeOpHelperFactory();
-        TreeIndexFileEnlistmentOperatorDescriptor fileEnlistmentOp = new TreeIndexFileEnlistmentOperatorDescriptor(spec,
-                recDesc, storageManager, treeIndexRegistryProvider, btreeSplitProvider, interiorFrameFactory,
-                leafFrameFactory, typeTraits, comparatorFactories, opHelperFactory);
+        TreeIndexFileEnlistmentOperatorDescriptor fileEnlistmentOp = new TreeIndexFileEnlistmentOperatorDescriptor(
+                spec, recDesc, storageManager, treeIndexRegistryProvider, btreeSplitProvider, interiorFrameFactory,
+                leafFrameFactory, typeTraits, comparatorFactories, null, opHelperFactory);
         JobHelper.createPartitionConstraint(spec, fileEnlistmentOp, splitNCs);
 
         spec.addRoot(fileEnlistmentOp);
diff --git a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
index 47fecad..21c00a1 100644
--- a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
+++ b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
@@ -96,7 +96,7 @@
         JobSpecification spec = new JobSpecification();
 
         String[] splitNCs = options.ncs.split(",");
-        
+
         IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider = TreeIndexRegistryProvider.INSTANCE;
         IStorageManagerInterface storageManager = StorageManagerInterface.INSTANCE;
 
@@ -125,8 +125,8 @@
         // use a disk-order scan to read primary index
         IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
         ITreeIndexOpHelperFactory opHelperFactory = new BTreeOpHelperFactory();
-        TreeIndexDiskOrderScanOperatorDescriptor btreeScanOp = new TreeIndexDiskOrderScanOperatorDescriptor(spec, recDesc,
-                storageManager, treeIndexRegistryProvider, primarySplitProvider, primaryInteriorFrameFactory,
+        TreeIndexDiskOrderScanOperatorDescriptor btreeScanOp = new TreeIndexDiskOrderScanOperatorDescriptor(spec,
+                recDesc, storageManager, treeIndexRegistryProvider, primarySplitProvider, primaryInteriorFrameFactory,
                 primaryLeafFrameFactory, primaryTypeTraits, opHelperFactory);
         JobHelper.createPartitionConstraint(spec, btreeScanOp, splitNCs);
 
@@ -157,9 +157,10 @@
         // tuple
         int[] fieldPermutation = { 1, 0 };
         IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.secondaryBTreeName);
-        TreeIndexBulkLoadOperatorDescriptor btreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec, storageManager,
-                treeIndexRegistryProvider, btreeSplitProvider, secondaryInteriorFrameFactory, secondaryLeafFrameFactory,
-                secondaryTypeTraits, comparatorFactories, fieldPermutation, 0.7f, opHelperFactory);
+        TreeIndexBulkLoadOperatorDescriptor btreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
+                storageManager, treeIndexRegistryProvider, btreeSplitProvider, secondaryInteriorFrameFactory,
+                secondaryLeafFrameFactory, secondaryTypeTraits, comparatorFactories, null, fieldPermutation, 0.7f,
+                opHelperFactory);
         JobHelper.createPartitionConstraint(spec, btreeBulkLoad, splitNCs);
 
         // connect the ops
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexOperatorsTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexScanOperatorTest.java
similarity index 79%
copy from hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexOperatorsTest.java
copy to hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexScanOperatorTest.java
index e835d1c..ba7621d 100644
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexOperatorsTest.java
+++ b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexScanOperatorTest.java
@@ -55,7 +55,6 @@
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOpHelperFactory;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexStatsOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
@@ -63,7 +62,7 @@
 import edu.uci.ics.hyracks.test.support.TestTreeIndexRegistryProvider;
 import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
 
-public class BTreePrimaryIndexOperatorsTest extends AbstractIntegrationTest {
+public class BTreePrimaryIndexScanOperatorTest extends AbstractIntegrationTest {
 	static {
 		TestStorageManagerComponentHolder.init(8192, 20, 20);
 	}
@@ -116,7 +115,7 @@
 		primaryTypeTraits[4] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
 		primaryTypeTraits[5] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
 		primaryComparatorFactories[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
-		
+
 		loadPrimaryIndexTest();
 	}
 
@@ -169,7 +168,7 @@
 				spec, storageManager, treeIndexRegistryProvider,
 				primaryBtreeSplitProvider, primaryInteriorFrameFactory,
 				primaryLeafFrameFactory, primaryTypeTraits,
-				primaryComparatorFactories, fieldPermutation, 0.7f,
+				primaryComparatorFactories, null, fieldPermutation, 0.7f,
 				opHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				primaryBtreeBulkLoad, NC1_ID);
@@ -185,22 +184,6 @@
 	}
 
 	@Test
-	public void showPrimaryIndexStats() throws Exception {
-		JobSpecification spec = new JobSpecification();
-
-		TreeIndexStatsOperatorDescriptor primaryStatsOp = new TreeIndexStatsOperatorDescriptor(
-				spec, storageManager, treeIndexRegistryProvider,
-				primaryBtreeSplitProvider, primaryInteriorFrameFactory,
-				primaryLeafFrameFactory, primaryTypeTraits,
-				primaryComparatorFactories, opHelperFactory);
-		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-				primaryStatsOp, NC1_ID);
-
-		spec.addRoot(primaryStatsOp);
-		runTest(spec);
-	}
-
-	@Test
 	public void scanPrimaryIndexTest() throws Exception {
 		JobSpecification spec = new JobSpecification();
 
@@ -248,60 +231,6 @@
 		runTest(spec);
 	}
 
-	@Test
-	public void searchPrimaryIndexTest() throws Exception {
-		JobSpecification spec = new JobSpecification();
-
-		// build tuple containing low and high search key
-		// high key and low key
-		ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryKeyFieldCount * 2);
-		DataOutput dos = tb.getDataOutput();
-
-		tb.reset();
-		// low key
-		UTF8StringSerializerDeserializer.INSTANCE.serialize("100", dos);
-		tb.addFieldEndOffset();
-		// high key
-		UTF8StringSerializerDeserializer.INSTANCE.serialize("200", dos);
-		tb.addFieldEndOffset();
-
-		ISerializerDeserializer[] keyRecDescSers = {
-				UTF8StringSerializerDeserializer.INSTANCE,
-				UTF8StringSerializerDeserializer.INSTANCE };
-		RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
-		ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(
-				spec, keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(),
-				tb.getSize());
-		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-				keyProviderOp, NC1_ID);
-
-		int[] lowKeyFields = { 0 };
-		int[] highKeyFields = { 1 };
-
-		BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(
-				spec, primaryRecDesc, storageManager,
-				treeIndexRegistryProvider, primaryBtreeSplitProvider,
-				primaryInteriorFrameFactory, primaryLeafFrameFactory,
-				primaryTypeTraits, primaryComparatorFactories, true,
-				lowKeyFields, highKeyFields, true, true, opHelperFactory);
-		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-				primaryBtreeSearchOp, NC1_ID);
-
-		PrinterOperatorDescriptor printer = new PrinterOperatorDescriptor(spec);
-		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer,
-				NC1_ID);
-
-		spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0,
-				primaryBtreeSearchOp, 0);
-		spec.connect(new OneToOneConnectorDescriptor(spec),
-				primaryBtreeSearchOp, 0, printer, 0);
-
-		spec.addRoot(printer);
-		runTest(spec);
-	}
-
-
 	@AfterClass
 	public static void cleanup() throws Exception {
 		File primary = new File(primaryFileName);
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexOperatorsTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexSearchOperatorTest.java
similarity index 80%
rename from hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexOperatorsTest.java
rename to hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexSearchOperatorTest.java
index e835d1c..c64424d 100644
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexOperatorsTest.java
+++ b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexSearchOperatorTest.java
@@ -55,7 +55,6 @@
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOpHelperFactory;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexStatsOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
@@ -63,7 +62,8 @@
 import edu.uci.ics.hyracks.test.support.TestTreeIndexRegistryProvider;
 import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
 
-public class BTreePrimaryIndexOperatorsTest extends AbstractIntegrationTest {
+public class BTreePrimaryIndexSearchOperatorTest extends
+		AbstractIntegrationTest {
 	static {
 		TestStorageManagerComponentHolder.init(8192, 20, 20);
 	}
@@ -116,7 +116,7 @@
 		primaryTypeTraits[4] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
 		primaryTypeTraits[5] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
 		primaryComparatorFactories[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
-		
+
 		loadPrimaryIndexTest();
 	}
 
@@ -169,7 +169,7 @@
 				spec, storageManager, treeIndexRegistryProvider,
 				primaryBtreeSplitProvider, primaryInteriorFrameFactory,
 				primaryLeafFrameFactory, primaryTypeTraits,
-				primaryComparatorFactories, fieldPermutation, 0.7f,
+				primaryComparatorFactories, null, fieldPermutation, 0.7f,
 				opHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				primaryBtreeBulkLoad, NC1_ID);
@@ -185,70 +185,6 @@
 	}
 
 	@Test
-	public void showPrimaryIndexStats() throws Exception {
-		JobSpecification spec = new JobSpecification();
-
-		TreeIndexStatsOperatorDescriptor primaryStatsOp = new TreeIndexStatsOperatorDescriptor(
-				spec, storageManager, treeIndexRegistryProvider,
-				primaryBtreeSplitProvider, primaryInteriorFrameFactory,
-				primaryLeafFrameFactory, primaryTypeTraits,
-				primaryComparatorFactories, opHelperFactory);
-		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-				primaryStatsOp, NC1_ID);
-
-		spec.addRoot(primaryStatsOp);
-		runTest(spec);
-	}
-
-	@Test
-	public void scanPrimaryIndexTest() throws Exception {
-		JobSpecification spec = new JobSpecification();
-
-		// build dummy tuple containing nothing
-		ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryKeyFieldCount * 2);
-		DataOutput dos = tb.getDataOutput();
-
-		tb.reset();
-		UTF8StringSerializerDeserializer.INSTANCE.serialize("0", dos);
-		tb.addFieldEndOffset();
-
-		ISerializerDeserializer[] keyRecDescSers = {
-				UTF8StringSerializerDeserializer.INSTANCE,
-				UTF8StringSerializerDeserializer.INSTANCE };
-		RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
-		ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(
-				spec, keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(),
-				tb.getSize());
-		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-				keyProviderOp, NC1_ID);
-
-		int[] lowKeyFields = null; // - infinity
-		int[] highKeyFields = null; // + infinity
-
-		BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(
-				spec, primaryRecDesc, storageManager,
-				treeIndexRegistryProvider, primaryBtreeSplitProvider,
-				primaryInteriorFrameFactory, primaryLeafFrameFactory,
-				primaryTypeTraits, primaryComparatorFactories, true,
-				lowKeyFields, highKeyFields, true, true, opHelperFactory);
-		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-				primaryBtreeSearchOp, NC1_ID);
-
-		PrinterOperatorDescriptor printer = new PrinterOperatorDescriptor(spec);
-		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer,
-				NC1_ID);
-
-		spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0,
-				primaryBtreeSearchOp, 0);
-		spec.connect(new OneToOneConnectorDescriptor(spec),
-				primaryBtreeSearchOp, 0, printer, 0);
-
-		spec.addRoot(printer);
-		runTest(spec);
-	}
-
-	@Test
 	public void searchPrimaryIndexTest() throws Exception {
 		JobSpecification spec = new JobSpecification();
 
@@ -301,7 +237,6 @@
 		runTest(spec);
 	}
 
-
 	@AfterClass
 	public static void cleanup() throws Exception {
 		File primary = new File(primaryFileName);
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexOperatorsTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexStatsOperatorTest.java
similarity index 68%
copy from hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexOperatorsTest.java
copy to hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexStatsOperatorTest.java
index e835d1c..e8fd60b 100644
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexOperatorsTest.java
+++ b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexStatsOperatorTest.java
@@ -15,7 +15,6 @@
 
 package edu.uci.ics.hyracks.tests.btree;
 
-import java.io.DataOutput;
 import java.io.File;
 import java.text.SimpleDateFormat;
 import java.util.Date;
@@ -32,7 +31,6 @@
 import edu.uci.ics.hyracks.api.dataflow.value.TypeTrait;
 import edu.uci.ics.hyracks.api.io.FileReference;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 import edu.uci.ics.hyracks.dataflow.common.data.comparators.UTF8StringBinaryComparatorFactory;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
 import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
@@ -43,11 +41,8 @@
 import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.misc.PrinterOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeOpHelperFactory;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
 import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
@@ -63,7 +58,7 @@
 import edu.uci.ics.hyracks.test.support.TestTreeIndexRegistryProvider;
 import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
 
-public class BTreePrimaryIndexOperatorsTest extends AbstractIntegrationTest {
+public class BTreePrimaryIndexStatsOperatorTest extends AbstractIntegrationTest {
 	static {
 		TestStorageManagerComponentHolder.init(8192, 20, 20);
 	}
@@ -116,7 +111,7 @@
 		primaryTypeTraits[4] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
 		primaryTypeTraits[5] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
 		primaryComparatorFactories[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
-		
+
 		loadPrimaryIndexTest();
 	}
 
@@ -169,7 +164,7 @@
 				spec, storageManager, treeIndexRegistryProvider,
 				primaryBtreeSplitProvider, primaryInteriorFrameFactory,
 				primaryLeafFrameFactory, primaryTypeTraits,
-				primaryComparatorFactories, fieldPermutation, 0.7f,
+				primaryComparatorFactories, null, fieldPermutation, 0.7f,
 				opHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				primaryBtreeBulkLoad, NC1_ID);
@@ -200,108 +195,6 @@
 		runTest(spec);
 	}
 
-	@Test
-	public void scanPrimaryIndexTest() throws Exception {
-		JobSpecification spec = new JobSpecification();
-
-		// build dummy tuple containing nothing
-		ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryKeyFieldCount * 2);
-		DataOutput dos = tb.getDataOutput();
-
-		tb.reset();
-		UTF8StringSerializerDeserializer.INSTANCE.serialize("0", dos);
-		tb.addFieldEndOffset();
-
-		ISerializerDeserializer[] keyRecDescSers = {
-				UTF8StringSerializerDeserializer.INSTANCE,
-				UTF8StringSerializerDeserializer.INSTANCE };
-		RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
-		ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(
-				spec, keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(),
-				tb.getSize());
-		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-				keyProviderOp, NC1_ID);
-
-		int[] lowKeyFields = null; // - infinity
-		int[] highKeyFields = null; // + infinity
-
-		BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(
-				spec, primaryRecDesc, storageManager,
-				treeIndexRegistryProvider, primaryBtreeSplitProvider,
-				primaryInteriorFrameFactory, primaryLeafFrameFactory,
-				primaryTypeTraits, primaryComparatorFactories, true,
-				lowKeyFields, highKeyFields, true, true, opHelperFactory);
-		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-				primaryBtreeSearchOp, NC1_ID);
-
-		PrinterOperatorDescriptor printer = new PrinterOperatorDescriptor(spec);
-		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer,
-				NC1_ID);
-
-		spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0,
-				primaryBtreeSearchOp, 0);
-		spec.connect(new OneToOneConnectorDescriptor(spec),
-				primaryBtreeSearchOp, 0, printer, 0);
-
-		spec.addRoot(printer);
-		runTest(spec);
-	}
-
-	@Test
-	public void searchPrimaryIndexTest() throws Exception {
-		JobSpecification spec = new JobSpecification();
-
-		// build tuple containing low and high search key
-		// high key and low key
-		ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryKeyFieldCount * 2);
-		DataOutput dos = tb.getDataOutput();
-
-		tb.reset();
-		// low key
-		UTF8StringSerializerDeserializer.INSTANCE.serialize("100", dos);
-		tb.addFieldEndOffset();
-		// high key
-		UTF8StringSerializerDeserializer.INSTANCE.serialize("200", dos);
-		tb.addFieldEndOffset();
-
-		ISerializerDeserializer[] keyRecDescSers = {
-				UTF8StringSerializerDeserializer.INSTANCE,
-				UTF8StringSerializerDeserializer.INSTANCE };
-		RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
-		ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(
-				spec, keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(),
-				tb.getSize());
-		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-				keyProviderOp, NC1_ID);
-
-		int[] lowKeyFields = { 0 };
-		int[] highKeyFields = { 1 };
-
-		BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(
-				spec, primaryRecDesc, storageManager,
-				treeIndexRegistryProvider, primaryBtreeSplitProvider,
-				primaryInteriorFrameFactory, primaryLeafFrameFactory,
-				primaryTypeTraits, primaryComparatorFactories, true,
-				lowKeyFields, highKeyFields, true, true, opHelperFactory);
-		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-				primaryBtreeSearchOp, NC1_ID);
-
-		PrinterOperatorDescriptor printer = new PrinterOperatorDescriptor(spec);
-		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer,
-				NC1_ID);
-
-		spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0,
-				primaryBtreeSearchOp, 0);
-		spec.connect(new OneToOneConnectorDescriptor(spec),
-				primaryBtreeSearchOp, 0, printer, 0);
-
-		spec.addRoot(printer);
-		runTest(spec);
-	}
-
-
 	@AfterClass
 	public static void cleanup() throws Exception {
 		File primary = new File(primaryFileName);
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeUpdateSecondaryIndexOperatorsTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexInsertOperatorTest.java
similarity index 98%
rename from hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeUpdateSecondaryIndexOperatorsTest.java
rename to hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexInsertOperatorTest.java
index b9c61e7..5c527cb 100644
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeUpdateSecondaryIndexOperatorsTest.java
+++ b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexInsertOperatorTest.java
@@ -65,7 +65,7 @@
 import edu.uci.ics.hyracks.test.support.TestTreeIndexRegistryProvider;
 import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
 
-public class BTreeUpdateSecondaryIndexOperatorsTest extends
+public class BTreeSecondaryIndexInsertOperatorTest extends
 		AbstractIntegrationTest {
 	static {
 		TestStorageManagerComponentHolder.init(8192, 20, 20);
@@ -206,7 +206,7 @@
 				spec, storageManager, treeIndexRegistryProvider,
 				primaryBtreeSplitProvider, primaryInteriorFrameFactory,
 				primaryLeafFrameFactory, primaryTypeTraits,
-				primaryComparatorFactories, fieldPermutation, 0.7f,
+				primaryComparatorFactories, null, fieldPermutation, 0.7f,
 				opHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				primaryBtreeBulkLoad, NC1_ID);
@@ -272,7 +272,7 @@
 				spec, storageManager, treeIndexRegistryProvider,
 				secondaryBtreeSplitProvider, secondaryInteriorFrameFactory,
 				secondaryLeafFrameFactory, secondaryTypeTraits,
-				secondaryComparatorFactories, fieldPermutation, 0.7f,
+				secondaryComparatorFactories, null, fieldPermutation, 0.7f,
 				opHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				secondaryBtreeBulkLoad, NC1_ID);
@@ -330,7 +330,7 @@
 				spec, ordersDesc, storageManager, treeIndexRegistryProvider,
 				primaryBtreeSplitProvider, primaryInteriorFrameFactory,
 				primaryLeafFrameFactory, primaryTypeTraits,
-				primaryComparatorFactories, primaryFieldPermutation,
+				primaryComparatorFactories, null, primaryFieldPermutation,
 				IndexOp.INSERT, opHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				primaryBtreeInsertOp, NC1_ID);
@@ -341,7 +341,7 @@
 				spec, ordersDesc, storageManager, treeIndexRegistryProvider,
 				secondaryBtreeSplitProvider, secondaryInteriorFrameFactory,
 				secondaryLeafFrameFactory, secondaryTypeTraits,
-				secondaryComparatorFactories, fieldPermutationB,
+				secondaryComparatorFactories, null, fieldPermutationB,
 				IndexOp.INSERT, opHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				secondaryInsertOp, NC1_ID);
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryOperatorsTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexSearchOperatorTest.java
similarity index 98%
rename from hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryOperatorsTest.java
rename to hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexSearchOperatorTest.java
index bdd1ac3..60c3b13 100644
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryOperatorsTest.java
+++ b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexSearchOperatorTest.java
@@ -62,7 +62,8 @@
 import edu.uci.ics.hyracks.test.support.TestTreeIndexRegistryProvider;
 import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
 
-public class BTreeSecondaryOperatorsTest extends AbstractIntegrationTest {
+public class BTreeSecondaryIndexSearchOperatorTest extends
+		AbstractIntegrationTest {
 	static {
 		TestStorageManagerComponentHolder.init(8192, 20, 20);
 	}
@@ -201,7 +202,7 @@
 				spec, storageManager, treeIndexRegistryProvider,
 				primaryBtreeSplitProvider, primaryInteriorFrameFactory,
 				primaryLeafFrameFactory, primaryTypeTraits,
-				primaryComparatorFactories, fieldPermutation, 0.7f,
+				primaryComparatorFactories, null, fieldPermutation, 0.7f,
 				opHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				primaryBtreeBulkLoad, NC1_ID);
@@ -267,7 +268,7 @@
 				spec, storageManager, treeIndexRegistryProvider,
 				secondaryBtreeSplitProvider, secondaryInteriorFrameFactory,
 				secondaryLeafFrameFactory, secondaryTypeTraits,
-				secondaryComparatorFactories, fieldPermutation, 0.7f,
+				secondaryComparatorFactories, null, fieldPermutation, 0.7f,
 				opHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				secondaryBtreeBulkLoad, NC1_ID);
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexOperatorsTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexSearchOperatorTest.java
similarity index 88%
rename from hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexOperatorsTest.java
rename to hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexSearchOperatorTest.java
index 71a9d89..1923c10 100644
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexOperatorsTest.java
+++ b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexSearchOperatorTest.java
@@ -47,17 +47,17 @@
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
 import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.misc.PrinterOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeOpHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOpHelperFactory;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexStatsOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeOpHelperFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeSearchOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.DoublePrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriterFactory;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
@@ -65,7 +65,8 @@
 import edu.uci.ics.hyracks.test.support.TestTreeIndexRegistryProvider;
 import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
 
-public class RTreePrimaryIndexOperatorsTest extends AbstractIntegrationTest {
+public class RTreePrimaryIndexSearchOperatorTest extends
+		AbstractIntegrationTest {
 	static {
 		TestStorageManagerComponentHolder.init(8192, 20, 20);
 	}
@@ -73,7 +74,6 @@
 	private IStorageManagerInterface storageManager = new TestStorageManagerInterface();
 	private IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider = new TestTreeIndexRegistryProvider();
 	private ITreeIndexOpHelperFactory opHelperFactory = new RTreeOpHelperFactory();
-	private ITreeIndexOpHelperFactory bTreeopHelperFactory = new BTreeOpHelperFactory();
 
 	private final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat(
 			"ddMMyy-hhmmssSS");
@@ -84,6 +84,7 @@
 	private int primaryKeyFieldCount = 4;
 	private ITypeTrait[] primaryTypeTraits = new ITypeTrait[primaryFieldCount];
 	private IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
+	private IPrimitiveValueProviderFactory[] primaryValueProviderFactories = new IPrimitiveValueProviderFactory[primaryKeyFieldCount];
 
 	private RTreeTypeAwareTupleWriterFactory primaryTupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(
 			primaryTypeTraits);
@@ -97,11 +98,9 @@
 					UTF8StringSerializerDeserializer.INSTANCE });
 
 	private ITreeIndexFrameFactory primaryInteriorFrameFactory = new RTreeNSMInteriorFrameFactory(
-			primaryTupleWriterFactory, primaryRecDesc.getFields(),
-			primaryKeyFieldCount);
+			primaryTupleWriterFactory, primaryKeyFieldCount);
 	private ITreeIndexFrameFactory primaryLeafFrameFactory = new RTreeNSMLeafFrameFactory(
-			primaryTupleWriterFactory, primaryRecDesc.getFields(),
-			primaryKeyFieldCount);
+			primaryTupleWriterFactory, primaryKeyFieldCount);
 
 	private static String primaryRTreeName = "primary"
 			+ simpleDateFormat.format(new Date());
@@ -124,6 +123,10 @@
 		primaryComparatorFactories[1] = primaryComparatorFactories[0];
 		primaryComparatorFactories[2] = primaryComparatorFactories[0];
 		primaryComparatorFactories[3] = primaryComparatorFactories[0];
+		primaryValueProviderFactories[0] = DoublePrimitiveValueProviderFactory.INSTANCE;
+		primaryValueProviderFactories[1] = primaryValueProviderFactories[0];
+		primaryValueProviderFactories[2] = primaryValueProviderFactories[0];
+		primaryValueProviderFactories[3] = primaryValueProviderFactories[0];
 
 		loadPrimaryIndexTest();
 	}
@@ -159,8 +162,8 @@
 				spec, storageManager, treeIndexRegistryProvider,
 				primaryRTreeSplitProvider, primaryInteriorFrameFactory,
 				primaryLeafFrameFactory, primaryTypeTraits,
-				primaryComparatorFactories, fieldPermutation, 0.7f,
-				opHelperFactory);
+				primaryComparatorFactories, primaryValueProviderFactories,
+				fieldPermutation, 0.7f, opHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				primaryRTreeBulkLoad, NC1_ID);
 
@@ -172,22 +175,6 @@
 	}
 
 	@Test
-	public void showPrimaryIndexStats() throws Exception {
-		JobSpecification spec = new JobSpecification();
-
-		TreeIndexStatsOperatorDescriptor primaryStatsOp = new TreeIndexStatsOperatorDescriptor(
-				spec, storageManager, treeIndexRegistryProvider,
-				primaryRTreeSplitProvider, primaryInteriorFrameFactory,
-				primaryLeafFrameFactory, primaryTypeTraits,
-				primaryComparatorFactories, opHelperFactory);
-		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-				primaryStatsOp, NC1_ID);
-
-		spec.addRoot(primaryStatsOp);
-		runTest(spec);
-	}
-
-	@Test
 	public void searchPrimaryIndexTest() throws Exception {
 		JobSpecification spec = new JobSpecification();
 
@@ -224,8 +211,8 @@
 				spec, primaryRecDesc, storageManager,
 				treeIndexRegistryProvider, primaryRTreeSplitProvider,
 				primaryInteriorFrameFactory, primaryLeafFrameFactory,
-				primaryTypeTraits, primaryComparatorFactories, keyFields,
-				opHelperFactory);
+				primaryTypeTraits, primaryComparatorFactories,
+				primaryValueProviderFactories, keyFields, opHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				primaryRTreeSearchOp, NC1_ID);
 
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexOperatorsTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexStatsOperatorTest.java
similarity index 74%
copy from hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexOperatorsTest.java
copy to hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexStatsOperatorTest.java
index 71a9d89..b6b4e20 100644
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexOperatorsTest.java
+++ b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexStatsOperatorTest.java
@@ -15,7 +15,6 @@
 
 package edu.uci.ics.hyracks.tests.rtree;
 
-import java.io.DataOutput;
 import java.io.File;
 import java.text.SimpleDateFormat;
 import java.util.Date;
@@ -32,7 +31,6 @@
 import edu.uci.ics.hyracks.api.dataflow.value.TypeTrait;
 import edu.uci.ics.hyracks.api.io.FileReference;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 import edu.uci.ics.hyracks.dataflow.common.data.comparators.DoubleBinaryComparatorFactory;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
@@ -45,9 +43,7 @@
 import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.misc.PrinterOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeOpHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
@@ -55,9 +51,9 @@
 import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexStatsOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeOpHelperFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeSearchOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.DoublePrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriterFactory;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
@@ -65,7 +61,7 @@
 import edu.uci.ics.hyracks.test.support.TestTreeIndexRegistryProvider;
 import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
 
-public class RTreePrimaryIndexOperatorsTest extends AbstractIntegrationTest {
+public class RTreePrimaryIndexStatsOperatorTest extends AbstractIntegrationTest {
 	static {
 		TestStorageManagerComponentHolder.init(8192, 20, 20);
 	}
@@ -73,7 +69,6 @@
 	private IStorageManagerInterface storageManager = new TestStorageManagerInterface();
 	private IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider = new TestTreeIndexRegistryProvider();
 	private ITreeIndexOpHelperFactory opHelperFactory = new RTreeOpHelperFactory();
-	private ITreeIndexOpHelperFactory bTreeopHelperFactory = new BTreeOpHelperFactory();
 
 	private final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat(
 			"ddMMyy-hhmmssSS");
@@ -84,6 +79,7 @@
 	private int primaryKeyFieldCount = 4;
 	private ITypeTrait[] primaryTypeTraits = new ITypeTrait[primaryFieldCount];
 	private IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
+	private IPrimitiveValueProviderFactory[] primaryValueProviderFactories = new IPrimitiveValueProviderFactory[primaryKeyFieldCount];
 
 	private RTreeTypeAwareTupleWriterFactory primaryTupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(
 			primaryTypeTraits);
@@ -97,11 +93,9 @@
 					UTF8StringSerializerDeserializer.INSTANCE });
 
 	private ITreeIndexFrameFactory primaryInteriorFrameFactory = new RTreeNSMInteriorFrameFactory(
-			primaryTupleWriterFactory, primaryRecDesc.getFields(),
-			primaryKeyFieldCount);
+			primaryTupleWriterFactory, primaryKeyFieldCount);
 	private ITreeIndexFrameFactory primaryLeafFrameFactory = new RTreeNSMLeafFrameFactory(
-			primaryTupleWriterFactory, primaryRecDesc.getFields(),
-			primaryKeyFieldCount);
+			primaryTupleWriterFactory, primaryKeyFieldCount);
 
 	private static String primaryRTreeName = "primary"
 			+ simpleDateFormat.format(new Date());
@@ -124,6 +118,10 @@
 		primaryComparatorFactories[1] = primaryComparatorFactories[0];
 		primaryComparatorFactories[2] = primaryComparatorFactories[0];
 		primaryComparatorFactories[3] = primaryComparatorFactories[0];
+		primaryValueProviderFactories[0] = DoublePrimitiveValueProviderFactory.INSTANCE;
+		primaryValueProviderFactories[1] = primaryValueProviderFactories[0];
+		primaryValueProviderFactories[2] = primaryValueProviderFactories[0];
+		primaryValueProviderFactories[3] = primaryValueProviderFactories[0];
 
 		loadPrimaryIndexTest();
 	}
@@ -159,8 +157,8 @@
 				spec, storageManager, treeIndexRegistryProvider,
 				primaryRTreeSplitProvider, primaryInteriorFrameFactory,
 				primaryLeafFrameFactory, primaryTypeTraits,
-				primaryComparatorFactories, fieldPermutation, 0.7f,
-				opHelperFactory);
+				primaryComparatorFactories, primaryValueProviderFactories,
+				fieldPermutation, 0.7f, opHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				primaryRTreeBulkLoad, NC1_ID);
 
@@ -187,61 +185,6 @@
 		runTest(spec);
 	}
 
-	@Test
-	public void searchPrimaryIndexTest() throws Exception {
-		JobSpecification spec = new JobSpecification();
-
-		// build tuple
-		ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryKeyFieldCount);
-		DataOutput dos = tb.getDataOutput();
-
-		tb.reset();
-		DoubleSerializerDeserializer.INSTANCE.serialize(61.2894, dos);
-		tb.addFieldEndOffset();
-		DoubleSerializerDeserializer.INSTANCE.serialize(-149.624, dos);
-		tb.addFieldEndOffset();
-		DoubleSerializerDeserializer.INSTANCE.serialize(61.8894, dos);
-		tb.addFieldEndOffset();
-		DoubleSerializerDeserializer.INSTANCE.serialize(-149.024, dos);
-		tb.addFieldEndOffset();
-
-		ISerializerDeserializer[] keyRecDescSers = {
-				DoubleSerializerDeserializer.INSTANCE,
-				DoubleSerializerDeserializer.INSTANCE,
-				DoubleSerializerDeserializer.INSTANCE,
-				DoubleSerializerDeserializer.INSTANCE };
-		RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
-		ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(
-				spec, keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(),
-				tb.getSize());
-		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-				keyProviderOp, NC1_ID);
-
-		int[] keyFields = { 0, 1, 2, 3 };
-
-		RTreeSearchOperatorDescriptor primaryRTreeSearchOp = new RTreeSearchOperatorDescriptor(
-				spec, primaryRecDesc, storageManager,
-				treeIndexRegistryProvider, primaryRTreeSplitProvider,
-				primaryInteriorFrameFactory, primaryLeafFrameFactory,
-				primaryTypeTraits, primaryComparatorFactories, keyFields,
-				opHelperFactory);
-		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-				primaryRTreeSearchOp, NC1_ID);
-
-		PrinterOperatorDescriptor printer = new PrinterOperatorDescriptor(spec);
-		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer,
-				NC1_ID);
-
-		spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0,
-				primaryRTreeSearchOp, 0);
-		spec.connect(new OneToOneConnectorDescriptor(spec),
-				primaryRTreeSearchOp, 0, printer, 0);
-
-		spec.addRoot(printer);
-		runTest(spec);
-	}
-
 	@AfterClass
 	public static void cleanup() throws Exception {
 		File primary = new File(primaryFileName);
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreeSecondaryIndexOperatorsTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreeSecondaryIndexSearchOperatorTest.java
similarity index 85%
rename from hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreeSecondaryIndexOperatorsTest.java
rename to hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreeSecondaryIndexSearchOperatorTest.java
index 07ead00..cc92e97 100644
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreeSecondaryIndexOperatorsTest.java
+++ b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreeSecondaryIndexSearchOperatorTest.java
@@ -53,17 +53,18 @@
 import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
 import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOpHelperFactory;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexStatsOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeOpHelperFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeSearchOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.DoublePrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriterFactory;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
@@ -71,7 +72,8 @@
 import edu.uci.ics.hyracks.test.support.TestTreeIndexRegistryProvider;
 import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
 
-public class RTreeSecondaryIndexOperatorsTest extends AbstractIntegrationTest {
+public class RTreeSecondaryIndexSearchOperatorTest extends
+		AbstractIntegrationTest {
 	static {
 		TestStorageManagerComponentHolder.init(8192, 20, 20);
 	}
@@ -90,24 +92,15 @@
 	private int primaryKeyFieldCount = 4;
 	private ITypeTrait[] primaryTypeTraits = new ITypeTrait[primaryFieldCount];
 	private IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
+	private IPrimitiveValueProviderFactory[] primaryValueProviderFactories = new IPrimitiveValueProviderFactory[primaryKeyFieldCount];
 
 	private RTreeTypeAwareTupleWriterFactory primaryTupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(
 			primaryTypeTraits);
 
-	private RecordDescriptor primaryRecDesc = new RecordDescriptor(
-			new ISerializerDeserializer[] {
-					DoubleSerializerDeserializer.INSTANCE,
-					DoubleSerializerDeserializer.INSTANCE,
-					DoubleSerializerDeserializer.INSTANCE,
-					DoubleSerializerDeserializer.INSTANCE,
-					UTF8StringSerializerDeserializer.INSTANCE });
-
 	private ITreeIndexFrameFactory primaryInteriorFrameFactory = new RTreeNSMInteriorFrameFactory(
-			primaryTupleWriterFactory, primaryRecDesc.getFields(),
-			primaryKeyFieldCount);
+			primaryTupleWriterFactory, primaryKeyFieldCount);
 	private ITreeIndexFrameFactory primaryLeafFrameFactory = new RTreeNSMLeafFrameFactory(
-			primaryTupleWriterFactory, primaryRecDesc.getFields(),
-			primaryKeyFieldCount);
+			primaryTupleWriterFactory, primaryKeyFieldCount);
 
 	private static String primaryRTreeName = "primary"
 			+ simpleDateFormat.format(new Date());
@@ -118,6 +111,14 @@
 			new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
 					primaryFileName))) });
 
+	private RecordDescriptor primaryRecDesc = new RecordDescriptor(
+			new ISerializerDeserializer[] {
+					DoubleSerializerDeserializer.INSTANCE,
+					DoubleSerializerDeserializer.INSTANCE,
+					DoubleSerializerDeserializer.INSTANCE,
+					DoubleSerializerDeserializer.INSTANCE,
+					UTF8StringSerializerDeserializer.INSTANCE });
+
 	// field, type and key declarations for primary B-tree index
 	private int primaryBTreeFieldCount = 10;
 	private ITypeTrait[] primaryBTreeTypeTraits = new ITypeTrait[primaryBTreeFieldCount];
@@ -157,23 +158,15 @@
 	private ITypeTrait[] secondaryTypeTraits = new ITypeTrait[secondaryFieldCount];
 	private int secondaryKeyFieldCount = 4;
 	private IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[secondaryKeyFieldCount];
+	private IPrimitiveValueProviderFactory[] secondaryValueProviderFactories = new IPrimitiveValueProviderFactory[secondaryKeyFieldCount];
+
 	private RTreeTypeAwareTupleWriterFactory secondaryTupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(
 			secondaryTypeTraits);
 
-	private RecordDescriptor secondaryRecDesc = new RecordDescriptor(
-			new ISerializerDeserializer[] {
-					DoubleSerializerDeserializer.INSTANCE,
-					DoubleSerializerDeserializer.INSTANCE,
-					DoubleSerializerDeserializer.INSTANCE,
-					DoubleSerializerDeserializer.INSTANCE,
-					UTF8StringSerializerDeserializer.INSTANCE });
-
 	private ITreeIndexFrameFactory secondaryInteriorFrameFactory = new RTreeNSMInteriorFrameFactory(
-			secondaryTupleWriterFactory, secondaryRecDesc.getFields(),
-			secondaryKeyFieldCount);
+			secondaryTupleWriterFactory, secondaryKeyFieldCount);
 	private ITreeIndexFrameFactory secondaryLeafFrameFactory = new RTreeNSMLeafFrameFactory(
-			secondaryTupleWriterFactory, secondaryRecDesc.getFields(),
-			secondaryKeyFieldCount);
+			secondaryTupleWriterFactory, secondaryKeyFieldCount);
 
 	private static String secondaryRTreeName = "secondary"
 			+ simpleDateFormat.format(new Date());
@@ -184,6 +177,14 @@
 			new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
 					secondaryFileName))) });
 
+	private RecordDescriptor secondaryRecDesc = new RecordDescriptor(
+			new ISerializerDeserializer[] {
+					DoubleSerializerDeserializer.INSTANCE,
+					DoubleSerializerDeserializer.INSTANCE,
+					DoubleSerializerDeserializer.INSTANCE,
+					DoubleSerializerDeserializer.INSTANCE,
+					UTF8StringSerializerDeserializer.INSTANCE });
+
 	@Before
 	public void setup() throws Exception {
 		// field, type and key declarations for primary R-tree index
@@ -196,6 +197,10 @@
 		primaryComparatorFactories[1] = primaryComparatorFactories[0];
 		primaryComparatorFactories[2] = primaryComparatorFactories[0];
 		primaryComparatorFactories[3] = primaryComparatorFactories[0];
+		primaryValueProviderFactories[0] = DoublePrimitiveValueProviderFactory.INSTANCE;
+		primaryValueProviderFactories[1] = primaryValueProviderFactories[0];
+		primaryValueProviderFactories[2] = primaryValueProviderFactories[0];
+		primaryValueProviderFactories[3] = primaryValueProviderFactories[0];
 
 		// field, type and key declarations for primary B-tree index
 		primaryBTreeTypeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
@@ -220,7 +225,11 @@
 		secondaryComparatorFactories[1] = secondaryComparatorFactories[0];
 		secondaryComparatorFactories[2] = secondaryComparatorFactories[0];
 		secondaryComparatorFactories[3] = secondaryComparatorFactories[0];
-		
+		secondaryValueProviderFactories[0] = DoublePrimitiveValueProviderFactory.INSTANCE;
+		secondaryValueProviderFactories[1] = secondaryValueProviderFactories[0];
+		secondaryValueProviderFactories[2] = secondaryValueProviderFactories[0];
+		secondaryValueProviderFactories[3] = secondaryValueProviderFactories[0];
+
 		loadPrimaryIndexTest();
 		loadPrimaryBTreeIndexTest();
 		loadSecondaryIndexTest();
@@ -283,7 +292,7 @@
 				spec, storageManager, treeIndexRegistryProvider,
 				primaryBTreeSplitProvider, primaryBTreeInteriorFrameFactory,
 				primaryBTreeLeafFrameFactory, primaryBTreeTypeTraits,
-				primaryBTreeComparatorFactories, fieldPermutation, 0.7f,
+				primaryBTreeComparatorFactories, null, fieldPermutation, 0.7f,
 				bTreeopHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				primaryBTreeBulkLoad, NC1_ID);
@@ -329,8 +338,8 @@
 				spec, storageManager, treeIndexRegistryProvider,
 				primaryRTreeSplitProvider, primaryInteriorFrameFactory,
 				primaryLeafFrameFactory, primaryTypeTraits,
-				primaryComparatorFactories, fieldPermutation, 0.7f,
-				opHelperFactory);
+				primaryComparatorFactories, primaryValueProviderFactories,
+				fieldPermutation, 0.7f, opHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				primaryRTreeBulkLoad, NC1_ID);
 
@@ -382,8 +391,8 @@
 				spec, storageManager, treeIndexRegistryProvider,
 				secondaryRTreeSplitProvider, secondaryInteriorFrameFactory,
 				secondaryLeafFrameFactory, secondaryTypeTraits,
-				secondaryComparatorFactories, fieldPermutation, 0.7f,
-				opHelperFactory);
+				secondaryComparatorFactories, secondaryValueProviderFactories,
+				fieldPermutation, 0.7f, opHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				secondaryRTreeBulkLoad, NC1_ID);
 
@@ -397,77 +406,6 @@
 	}
 
 	@Test
-	public void showPrimaryIndexStats() throws Exception {
-		JobSpecification spec = new JobSpecification();
-
-		TreeIndexStatsOperatorDescriptor primaryStatsOp = new TreeIndexStatsOperatorDescriptor(
-				spec, storageManager, treeIndexRegistryProvider,
-				primaryRTreeSplitProvider, primaryInteriorFrameFactory,
-				primaryLeafFrameFactory, primaryTypeTraits,
-				primaryComparatorFactories, opHelperFactory);
-		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-				primaryStatsOp, NC1_ID);
-
-		spec.addRoot(primaryStatsOp);
-		runTest(spec);
-	}
-
-	@Test
-	public void searchPrimaryIndexTest() throws Exception {
-		JobSpecification spec = new JobSpecification();
-
-		// build tuple
-		ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryKeyFieldCount);
-		DataOutput dos = tb.getDataOutput();
-
-		tb.reset();
-		DoubleSerializerDeserializer.INSTANCE.serialize(61.2894, dos);
-		tb.addFieldEndOffset();
-		DoubleSerializerDeserializer.INSTANCE.serialize(-149.624, dos);
-		tb.addFieldEndOffset();
-		DoubleSerializerDeserializer.INSTANCE.serialize(61.8894, dos);
-		tb.addFieldEndOffset();
-		DoubleSerializerDeserializer.INSTANCE.serialize(-149.024, dos);
-		tb.addFieldEndOffset();
-
-		ISerializerDeserializer[] keyRecDescSers = {
-				DoubleSerializerDeserializer.INSTANCE,
-				DoubleSerializerDeserializer.INSTANCE,
-				DoubleSerializerDeserializer.INSTANCE,
-				DoubleSerializerDeserializer.INSTANCE };
-		RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
-		ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(
-				spec, keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(),
-				tb.getSize());
-		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-				keyProviderOp, NC1_ID);
-
-		int[] keyFields = { 0, 1, 2, 3 };
-
-		RTreeSearchOperatorDescriptor primaryRTreeSearchOp = new RTreeSearchOperatorDescriptor(
-				spec, primaryRecDesc, storageManager,
-				treeIndexRegistryProvider, primaryRTreeSplitProvider,
-				primaryInteriorFrameFactory, primaryLeafFrameFactory,
-				primaryTypeTraits, primaryComparatorFactories, keyFields,
-				opHelperFactory);
-		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
-				primaryRTreeSearchOp, NC1_ID);
-
-		PrinterOperatorDescriptor printer = new PrinterOperatorDescriptor(spec);
-		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer,
-				NC1_ID);
-
-		spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0,
-				primaryRTreeSearchOp, 0);
-		spec.connect(new OneToOneConnectorDescriptor(spec),
-				primaryRTreeSearchOp, 0, printer, 0);
-
-		spec.addRoot(printer);
-		runTest(spec);
-	}
-
-	@Test
 	public void searchSecondaryIndexTest() throws Exception {
 		JobSpecification spec = new JobSpecification();
 
@@ -504,8 +442,8 @@
 				spec, secondaryRecDesc, storageManager,
 				treeIndexRegistryProvider, secondaryRTreeSplitProvider,
 				secondaryInteriorFrameFactory, secondaryLeafFrameFactory,
-				secondaryTypeTraits, secondaryComparatorFactories, keyFields,
-				opHelperFactory);
+				secondaryTypeTraits, secondaryComparatorFactories,
+				secondaryValueProviderFactories, keyFields, opHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				secondaryRTreeSearchOp, NC1_ID);
 
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeOpHelper.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeOpHelper.java
index 9b8d4a2..eaf5a81 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeOpHelper.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeOpHelper.java
@@ -1,6 +1,7 @@
 package edu.uci.ics.hyracks.storage.am.btree.dataflow;
 
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
 import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
@@ -11,13 +12,17 @@
 import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexOpHelper;
 import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
 import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
 
 public class BTreeOpHelper extends TreeIndexOpHelper {
 
+    protected ITreeIndexOperatorDescriptorHelper opDesc;
+
     public BTreeOpHelper(ITreeIndexOperatorDescriptorHelper opDesc, IHyracksTaskContext ctx, int partition,
             IndexHelperOpenMode mode) {
         super(opDesc, ctx, partition, mode);
+        this.opDesc = opDesc;
     }
 
     public ITreeIndex createTreeIndex() throws HyracksDataException {
@@ -29,4 +34,7 @@
                 opDesc.getTreeIndexLeafFactory(), cmp);
     }
 
+    public MultiComparator createMultiComparator(IBinaryComparator[] comparators) throws HyracksDataException {
+        return new MultiComparator(opDesc.getTreeIndexTypeTraits(), comparators);
+    }
 }
\ No newline at end of file
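
For context, the new createMultiComparator hook combines the descriptor's type traits with caller-supplied comparators. A minimal usage sketch, assuming the comparators are instantiated from the descriptor's comparator factories; the helper method and variable names below are illustrative, not part of this patch:

    private static MultiComparator buildMultiComparator(BTreeOpHelper helper,
            ITreeIndexOperatorDescriptorHelper opDesc) throws HyracksDataException {
        // Instantiate one IBinaryComparator per factory declared on the descriptor.
        IBinaryComparatorFactory[] cmpFactories = opDesc.getTreeIndexComparatorFactories();
        IBinaryComparator[] comparators = new IBinaryComparator[cmpFactories.length];
        for (int i = 0; i < cmpFactories.length; i++) {
            comparators[i] = cmpFactories[i].createBinaryComparator();
        }
        // Combine them with the descriptor's type traits via the new helper method.
        return helper.createMultiComparator(comparators);
    }
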
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorDescriptor.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorDescriptor.java
index 717c99e..c542bb0 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorDescriptor.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorDescriptor.java
@@ -49,7 +49,7 @@
             IBinaryComparatorFactory[] comparatorFactories, boolean isForward, int[] lowKeyFields, int[] highKeyFields,
             boolean lowKeyInclusive, boolean highKeyInclusive, ITreeIndexOpHelperFactory opHelperFactory) {
         super(spec, 1, 1, recDesc, storageManager, treeIndexRegistryProvider, fileSplitProvider, interiorFrameFactory,
-                leafFrameFactory, typeTraits, comparatorFactories, opHelperFactory);
+                leafFrameFactory, typeTraits, comparatorFactories, null, opHelperFactory);
         this.isForward = isForward;
         this.lowKeyFields = lowKeyFields;
         this.highKeyFields = highKeyFields;
@@ -58,9 +58,9 @@
     }
 
     @Override
-    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx, IOperatorEnvironment env,
+    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx, final IOperatorEnvironment env,
             IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
         return new BTreeSearchOperatorNodePushable(this, ctx, partition, recordDescProvider, isForward, lowKeyFields,
                 highKeyFields, lowKeyInclusive, highKeyInclusive);
     }
-}
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java
index 2831ff6..1869a1b 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java
@@ -1151,7 +1151,7 @@
             ITreeIndexFrame interiorFrame, ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
 
         if (loaded)
-            throw new HyracksDataException("Trying to bulk-load BTree but has BTree already been loaded.");
+            throw new HyracksDataException("Trying to bulk-load BTree but BTree has already been loaded.");
 
         BulkLoadContext ctx = new BulkLoadContext(fillFactor, (IBTreeLeafFrame) leafFrame,
                 (IBTreeInteriorFrame) interiorFrame, metaFrame);
@@ -1287,4 +1287,4 @@
     public IndexType getIndexType() {
         return IndexType.BTREE;
     }
-}
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ICursorInitialState.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ICursorInitialState.java
index 5f7c88d..7c80be1 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ICursorInitialState.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ICursorInitialState.java
@@ -18,7 +18,7 @@
 import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
 
 public interface ICursorInitialState {
-    public ICachedPage getPage();
+	public ICachedPage getPage();
 
-    public void setPage(ICachedPage page);
+	public void setPage(ICachedPage page);
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IFreePageManager.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IFreePageManager.java
index 7abc0e3..045ff9d 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IFreePageManager.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IFreePageManager.java
@@ -3,23 +3,27 @@
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 
 public interface IFreePageManager {
-    public int getFreePage(ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException;
+	public int getFreePage(ITreeIndexMetaDataFrame metaFrame)
+			throws HyracksDataException;
 
-    public void addFreePage(ITreeIndexMetaDataFrame metaFrame, int freePage) throws HyracksDataException;
+	public void addFreePage(ITreeIndexMetaDataFrame metaFrame, int freePage)
+			throws HyracksDataException;
 
-    public int getMaxPage(ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException;
+	public int getMaxPage(ITreeIndexMetaDataFrame metaFrame)
+			throws HyracksDataException;
 
-    public void init(ITreeIndexMetaDataFrame metaFrame, int currentMaxPage) throws HyracksDataException;
+	public void init(ITreeIndexMetaDataFrame metaFrame, int currentMaxPage)
+			throws HyracksDataException;
 
-    public ITreeIndexMetaDataFrameFactory getMetaDataFrameFactory();
+	public ITreeIndexMetaDataFrameFactory getMetaDataFrameFactory();
 
-    // required to return negative values
-    public byte getMetaPageLevelIndicator();
+	// required to return negative values
+	public byte getMetaPageLevelIndicator();
 
-    public byte getFreePageLevelIndicator();
+	public byte getFreePageLevelIndicator();
 
-    // determined by examining level indicator
-    public boolean isMetaPage(ITreeIndexMetaDataFrame metaFrame);
+	// determined by examining level indicator
+	public boolean isMetaPage(ITreeIndexMetaDataFrame metaFrame);
 
-    public boolean isFreePage(ITreeIndexMetaDataFrame metaFrame);
+	public boolean isFreePage(ITreeIndexMetaDataFrame metaFrame);
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IPrimitiveValueProvider.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IPrimitiveValueProvider.java
new file mode 100644
index 0000000..4696e68
--- /dev/null
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IPrimitiveValueProvider.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.api;
+
+public interface IPrimitiveValueProvider {
+	public double getValue(byte[] bytes, int offset);
+}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IPrimitiveValueProviderFactory.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IPrimitiveValueProviderFactory.java
new file mode 100644
index 0000000..8e45d0c
--- /dev/null
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IPrimitiveValueProviderFactory.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.api;
+
+import java.io.Serializable;
+
+public interface IPrimitiveValueProviderFactory extends Serializable {
+	public IPrimitiveValueProvider createPrimitiveValueProvider();
+}
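
The tests above reference DoublePrimitiveValueProviderFactory.INSTANCE; that class is not shown in this excerpt. A provider for doubles serialized big-endian (as Java's DataOutput writes them) might look like the sketch below; the class name and placement are illustrative, not part of this patch:

    package edu.uci.ics.hyracks.storage.am.common.api;

    public class DoubleValueProviderFactorySketch implements IPrimitiveValueProviderFactory {
        private static final long serialVersionUID = 1L;

        @Override
        public IPrimitiveValueProvider createPrimitiveValueProvider() {
            return new IPrimitiveValueProvider() {
                @Override
                public double getValue(byte[] bytes, int offset) {
                    // Reassemble the 8 big-endian bytes of the double key field.
                    long bits = 0L;
                    for (int i = 0; i < 8; i++) {
                        bits = (bits << 8) | (bytes[offset + i] & 0xffL);
                    }
                    return Double.longBitsToDouble(bits);
                }
            };
        }
    }
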
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISearchPredicate.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISearchPredicate.java
index f4836e0..a96db28 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISearchPredicate.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISearchPredicate.java
@@ -20,7 +20,7 @@
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
 
 public interface ISearchPredicate extends Serializable {
-    public MultiComparator getLowKeyComparator();
+	public MultiComparator getLowKeyComparator();
 
-    public MultiComparator getHighKeyComparator();
+	public MultiComparator getHighKeyComparator();
 }
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISlotManager.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISlotManager.java
index 29646e7..a6102ab 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISlotManager.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISlotManager.java
@@ -21,22 +21,23 @@
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
 
 public interface ISlotManager {
-    public void setFrame(ITreeIndexFrame frame);
+	public void setFrame(ITreeIndexFrame frame);
 
-    public int findTupleIndex(ITupleReference searchKey, ITreeIndexTupleReference frameTuple, MultiComparator multiCmp,
-            FindTupleMode mode, FindTupleNoExactMatchPolicy matchPolicy);
+	public int findTupleIndex(ITupleReference searchKey,
+			ITreeIndexTupleReference frameTuple, MultiComparator multiCmp,
+			FindTupleMode mode, FindTupleNoExactMatchPolicy matchPolicy);
 
-    public int insertSlot(int tupleIndex, int tupleOff);
+	public int insertSlot(int tupleIndex, int tupleOff);
 
-    public int getSlotStartOff();
+	public int getSlotStartOff();
 
-    public int getSlotEndOff();
+	public int getSlotEndOff();
 
-    public int getTupleOff(int slotOff);
+	public int getTupleOff(int slotOff);
 
-    public void setSlot(int slotOff, int value);
+	public void setSlot(int slotOff, int value);
 
-    public int getSlotOff(int tupleIndex);
+	public int getSlotOff(int tupleIndex);
 
-    public int getSlotSize();
+	public int getSlotSize();
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISplitKey.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISplitKey.java
index 246c09d..3a1c8a1 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISplitKey.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISplitKey.java
@@ -3,23 +3,23 @@
 import java.nio.ByteBuffer;
 
 public interface ISplitKey {
-    public void initData(int keySize);
+	public void initData(int keySize);
 
-    public void reset();
+	public void reset();
 
-    public ByteBuffer getBuffer();
+	public ByteBuffer getBuffer();
 
-    public ITreeIndexTupleReference getTuple();
+	public ITreeIndexTupleReference getTuple();
 
-    public int getLeftPage();
+	public int getLeftPage();
 
-    public int getRightPage();
+	public int getRightPage();
 
-    public void setLeftPage(int leftPage);
+	public void setLeftPage(int leftPage);
 
-    public void setRightPage(int rightPage);
+	public void setRightPage(int rightPage);
 
-    public void setPages(int leftPage, int rightPage);
+	public void setPages(int leftPage, int rightPage);
 
-    public ISplitKey duplicate(ITreeIndexTupleReference copyTuple);
+	public ISplitKey duplicate(ITreeIndexTupleReference copyTuple);
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndex.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndex.java
index 2d77f06..d075285 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndex.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndex.java
@@ -6,47 +6,56 @@
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOpContext;
 
 public interface ITreeIndex {
-    // init:
+	// init:
 
-    public void create(int indexFileId, ITreeIndexFrame leafFrame, ITreeIndexMetaDataFrame metaFrame) throws Exception;
+	public void create(int indexFileId, ITreeIndexFrame leafFrame,
+			ITreeIndexMetaDataFrame metaFrame) throws Exception;
 
-    public void open(int indexFileId);
+	public void open(int indexFileId);
 
-    // operations:
+	// operations:
 
-    public void insert(ITupleReference tuple, IndexOpContext ictx) throws Exception;
+	public void insert(ITupleReference tuple, IndexOpContext ictx)
+			throws Exception;
 
-    public void update(ITupleReference tuple, IndexOpContext ictx) throws Exception;
+	public void update(ITupleReference tuple, IndexOpContext ictx)
+			throws Exception;
 
-    public void delete(ITupleReference tuple, IndexOpContext ictx) throws Exception;
+	public void delete(ITupleReference tuple, IndexOpContext ictx)
+			throws Exception;
 
-    public IndexOpContext createOpContext(IndexOp op, ITreeIndexFrame leafFrame, ITreeIndexFrame interiorFrame,
-            ITreeIndexMetaDataFrame metaFrame);
+	public IndexOpContext createOpContext(IndexOp op,
+			ITreeIndexFrame leafFrame, ITreeIndexFrame interiorFrame,
+			ITreeIndexMetaDataFrame metaFrame);
 
-    // bulk loading:
+	// bulk loading:
 
-    public IIndexBulkLoadContext beginBulkLoad(float fillFactor, ITreeIndexFrame leafFrame,
-            ITreeIndexFrame interiorFrame, ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException;
+	public IIndexBulkLoadContext beginBulkLoad(float fillFactor,
+			ITreeIndexFrame leafFrame, ITreeIndexFrame interiorFrame,
+			ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException;
 
-    public void bulkLoadAddTuple(IIndexBulkLoadContext ictx, ITupleReference tuple) throws HyracksDataException;
+	public void bulkLoadAddTuple(IIndexBulkLoadContext ictx,
+			ITupleReference tuple) throws HyracksDataException;
 
-    public void endBulkLoad(IIndexBulkLoadContext ictx) throws HyracksDataException;
+	public void endBulkLoad(IIndexBulkLoadContext ictx)
+			throws HyracksDataException;
 
-    // search:
-    public void diskOrderScan(ITreeIndexCursor icursor, ITreeIndexFrame leafFrame, ITreeIndexMetaDataFrame metaFrame,
-            IndexOpContext ictx) throws HyracksDataException;
+	// search:
+	public void diskOrderScan(ITreeIndexCursor icursor,
+			ITreeIndexFrame leafFrame, ITreeIndexMetaDataFrame metaFrame,
+			IndexOpContext ictx) throws HyracksDataException;
 
-    // utility:
+	// utility:
 
-    public IFreePageManager getFreePageManager();
+	public IFreePageManager getFreePageManager();
 
-    public int getRootPageId();
+	public int getRootPageId();
 
-    public ITreeIndexFrameFactory getLeafFrameFactory();
+	public ITreeIndexFrameFactory getLeafFrameFactory();
 
-    public ITreeIndexFrameFactory getInteriorFrameFactory();
+	public ITreeIndexFrameFactory getInteriorFrameFactory();
 
-    public int getFieldCount();
+	public int getFieldCount();
 
-    public IndexType getIndexType();
+	public IndexType getIndexType();
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexCursor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexCursor.java
index 22b2b6f..56dfd47 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexCursor.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexCursor.java
@@ -20,21 +20,22 @@
 import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
 
 public interface ITreeIndexCursor {
-    public void reset();
+	public void reset();
 
-    public boolean hasNext() throws Exception;
+	public boolean hasNext() throws Exception;
 
-    public void next() throws Exception;
+	public void next() throws Exception;
 
-    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws Exception;
+	public void open(ICursorInitialState initialState,
+			ISearchPredicate searchPred) throws Exception;
 
-    public ICachedPage getPage();
+	public ICachedPage getPage();
 
-    public void close() throws Exception;
+	public void close() throws Exception;
 
-    public void setBufferCache(IBufferCache bufferCache);
+	public void setBufferCache(IBufferCache bufferCache);
 
-    public void setFileId(int fileId);
+	public void setFileId(int fileId);
 
-    public ITupleReference getTuple();
+	public ITupleReference getTuple();
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrame.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrame.java
index 246efe4..db458ec 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrame.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrame.java
@@ -25,80 +25,86 @@
 import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
 
 public interface ITreeIndexFrame {
-    public void setPage(ICachedPage page);
+	public void setPage(ICachedPage page);
 
-    public ICachedPage getPage();
+	public ICachedPage getPage();
 
-    public ByteBuffer getBuffer();
+	public ByteBuffer getBuffer();
 
-    public int findTupleIndex(ITupleReference tuple, MultiComparator cmp) throws Exception;
+	public int findTupleIndex(ITupleReference tuple, MultiComparator cmp)
+			throws Exception;
 
-    public void insert(ITupleReference tuple, MultiComparator cmp, int tupleIndex) throws Exception;
+	public void insert(ITupleReference tuple, MultiComparator cmp,
+			int tupleIndex) throws Exception;
 
-    public void update(int rid, ITupleReference tuple) throws Exception;
+	public void update(int rid, ITupleReference tuple) throws Exception;
 
-    public void delete(ITupleReference tuple, MultiComparator cmp, boolean exactDelete) throws Exception;
+	public void delete(ITupleReference tuple, MultiComparator cmp,
+			boolean exactDelete) throws Exception;
 
-    // returns true if slots were modified, false otherwise
-    public boolean compact(MultiComparator cmp);
+	// returns true if slots were modified, false otherwise
+	public boolean compact(MultiComparator cmp);
 
-    public boolean compress(MultiComparator cmp) throws HyracksDataException;
+	public boolean compress(MultiComparator cmp) throws HyracksDataException;
 
-    public void initBuffer(byte level);
+	public void initBuffer(byte level);
 
-    public int getTupleCount();
+	public int getTupleCount();
 
-    // assumption: page must be write-latched at this point
-    public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple, MultiComparator cmp);
+	// assumption: page must be write-latched at this point
+	public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple,
+			MultiComparator cmp);
 
-    public FrameOpSpaceStatus hasSpaceUpdate(int rid, ITupleReference tuple, MultiComparator cmp);
+	public FrameOpSpaceStatus hasSpaceUpdate(int rid, ITupleReference tuple,
+			MultiComparator cmp);
 
-    public int getTupleOffset(int slotNum);
+	public int getTupleOffset(int slotNum);
 
-    public int getTotalFreeSpace();
+	public int getTotalFreeSpace();
 
-    public void setPageLsn(int pageLsn);
+	public void setPageLsn(int pageLsn);
 
-    public int getPageLsn();
+	public int getPageLsn();
 
-    // for debugging
-    public void printHeader();
+	// for debugging
+	public void printHeader();
 
-    public String printKeys(MultiComparator cmp, ISerializerDeserializer[] fields) throws HyracksDataException;
+	public String printKeys(MultiComparator cmp,
+			ISerializerDeserializer[] fields) throws HyracksDataException;
 
-    // TODO; what if tuples more than half-page size?
-    public int split(ITreeIndexFrame rightFrame, ITupleReference tuple, MultiComparator cmp, ISplitKey splitKey)
-            throws Exception;
+	// TODO: what if tuples are more than half-page size?
+	public int split(ITreeIndexFrame rightFrame, ITupleReference tuple,
+			MultiComparator cmp, ISplitKey splitKey) throws Exception;
 
-    public ISlotManager getSlotManager();
+	public ISlotManager getSlotManager();
 
-    // ATTENTION: in b-tree operations it may not always be possible to
-    // determine whether an ICachedPage is a leaf or interior node
-    // a compatible interior and leaf implementation MUST return identical
-    // values when given the same ByteBuffer for the functions below
-    public boolean isLeaf();
+	// ATTENTION: in b-tree operations it may not always be possible to
+	// determine whether an ICachedPage is a leaf or an interior node.
+	// A compatible interior and leaf implementation MUST return identical
+	// values when given the same ByteBuffer for the functions below
+	public boolean isLeaf();
 
-    public boolean isInterior();
+	public boolean isInterior();
 
-    public byte getLevel();
+	public byte getLevel();
 
-    public void setLevel(byte level);
+	public void setLevel(byte level);
 
-    public boolean getSmFlag(); // structure modification flag
+	public boolean getSmFlag(); // structure modification flag
 
-    public void setSmFlag(boolean smFlag);
+	public void setSmFlag(boolean smFlag);
 
-    public int getSlotSize();
+	public int getSlotSize();
 
-    // TODO: should be removed after new tuple format
-    public void setPageTupleFieldCount(int fieldCount);
+	// TODO: should be removed after new tuple format
+	public void setPageTupleFieldCount(int fieldCount);
 
-    // for debugging
-    public int getFreeSpaceOff();
+	// for debugging
+	public int getFreeSpaceOff();
 
-    public void setFreeSpaceOff(int freeSpace);
+	public void setFreeSpaceOff(int freeSpace);
 
-    public ITreeIndexTupleWriter getTupleWriter();
+	public ITreeIndexTupleWriter getTupleWriter();
 
-    public int getPageHeaderSize();
+	public int getPageHeaderSize();
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrameFactory.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrameFactory.java
index 83b95b6..9ec69d9 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrameFactory.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrameFactory.java
@@ -3,5 +3,5 @@
 import java.io.Serializable;
 
 public interface ITreeIndexFrameFactory extends Serializable {
-    public ITreeIndexFrame createFrame();
+	public ITreeIndexFrame createFrame();
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrame.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrame.java
index bbd03d6..4d81e5e6 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrame.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrame.java
@@ -18,27 +18,27 @@
 import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
 
 public interface ITreeIndexMetaDataFrame {
-    public void initBuffer(int level);
+	public void initBuffer(int level);
 
-    public void setPage(ICachedPage page);
+	public void setPage(ICachedPage page);
 
-    public ICachedPage getPage();
+	public ICachedPage getPage();
 
-    public byte getLevel();
+	public byte getLevel();
 
-    public void setLevel(byte level);
+	public void setLevel(byte level);
 
-    public int getNextPage();
+	public int getNextPage();
 
-    public void setNextPage(int nextPage);
+	public void setNextPage(int nextPage);
 
-    public int getMaxPage();
+	public int getMaxPage();
 
-    public void setMaxPage(int maxPage);
+	public void setMaxPage(int maxPage);
 
-    public int getFreePage();
+	public int getFreePage();
 
-    public boolean hasSpace();
+	public boolean hasSpace();
 
-    public void addFreePage(int freePage);
+	public void addFreePage(int freePage);
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrameFactory.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrameFactory.java
index 6fd88e8..d5625b4 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrameFactory.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrameFactory.java
@@ -16,5 +16,5 @@
 package edu.uci.ics.hyracks.storage.am.common.api;
 
 public interface ITreeIndexMetaDataFrameFactory {
-    public ITreeIndexMetaDataFrame createFrame();
+	public ITreeIndexMetaDataFrame createFrame();
 }
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleReference.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleReference.java
index d2c2df4..8b845ac 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleReference.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleReference.java
@@ -20,11 +20,11 @@
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
 
 public interface ITreeIndexTupleReference extends ITupleReference {
-    public void setFieldCount(int fieldCount);
+	public void setFieldCount(int fieldCount);
 
-    public void setFieldCount(int fieldStartIndex, int fieldCount);
+	public void setFieldCount(int fieldStartIndex, int fieldCount);
 
-    public void resetByTupleOffset(ByteBuffer buf, int tupleStartOffset);
+	public void resetByTupleOffset(ByteBuffer buf, int tupleStartOffset);
 
-    public void resetByTupleIndex(ITreeIndexFrame frame, int tupleIndex);
+	public void resetByTupleIndex(ITreeIndexFrame frame, int tupleIndex);
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriter.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriter.java
index 39577ea..6cd12fb 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriter.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriter.java
@@ -20,18 +20,20 @@
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
 
 public interface ITreeIndexTupleWriter {
-    public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf, int targetOff);
+	public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf,
+			int targetOff);
 
-    public int bytesRequired(ITupleReference tuple);
+	public int bytesRequired(ITupleReference tuple);
 
-    public int writeTupleFields(ITupleReference tuple, int startField, int numFields, ByteBuffer targetBuf,
-            int targetOff);
+	public int writeTupleFields(ITupleReference tuple, int startField,
+			int numFields, ByteBuffer targetBuf, int targetOff);
 
-    public int bytesRequired(ITupleReference tuple, int startField, int numFields);
+	public int bytesRequired(ITupleReference tuple, int startField,
+			int numFields);
 
-    // return a tuplereference instance that can read the tuple written by this
-    // writer
-    // the main idea is that the format of the written tuple may not be the same
-    // as the format written by this writer
-    public ITreeIndexTupleReference createTupleReference();
+	// return a tuple reference instance that can read the tuple written by
+	// this writer; the main idea is that the format of the written tuple
+	// may not be the same as the format of the tuple that was passed to
+	// the writer
+	public ITreeIndexTupleReference createTupleReference();
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriterFactory.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriterFactory.java
index bd7bfda..ea5e740 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriterFactory.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriterFactory.java
@@ -18,5 +18,5 @@
 import java.io.Serializable;
 
 public interface ITreeIndexTupleWriterFactory extends Serializable {
-    public ITreeIndexTupleWriter createTupleWriter();
+	public ITreeIndexTupleWriter createTupleWriter();
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IndexType.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IndexType.java
index d5f9f44..6f83e0b 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IndexType.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IndexType.java
@@ -1,5 +1,5 @@
 package edu.uci.ics.hyracks.storage.am.common.api;
 
 public enum IndexType {
-    BTREE, RTREE, INVERTED
+	BTREE, RTREE, INVERTED
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/TreeIndexException.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/TreeIndexException.java
index ad3db58..48acb06 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/TreeIndexException.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/TreeIndexException.java
@@ -17,22 +17,22 @@
 
 public class TreeIndexException extends Exception {
 
-    private static final long serialVersionUID = 1L;
-    private boolean handled = false;
+	private static final long serialVersionUID = 1L;
+	private boolean handled = false;
 
-    public TreeIndexException(Exception e) {
-        super(e);
-    }
+	public TreeIndexException(Exception e) {
+		super(e);
+	}
 
-    public TreeIndexException(String message) {
-        super(message);
-    }
+	public TreeIndexException(String message) {
+		super(message);
+	}
 
-    public void setHandled(boolean handled) {
-        this.handled = handled;
-    }
+	public void setHandled(boolean handled) {
+		this.handled = handled;
+	}
 
-    public boolean getHandled() {
-        return handled;
-    }
+	public boolean getHandled() {
+		return handled;
+	}
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/AbstractTreeIndexOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/AbstractTreeIndexOperatorDescriptor.java
index c905eb2..d7c7ab9 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/AbstractTreeIndexOperatorDescriptor.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/AbstractTreeIndexOperatorDescriptor.java
@@ -21,90 +21,103 @@
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
 
-public abstract class AbstractTreeIndexOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor implements
-        ITreeIndexOperatorDescriptorHelper {
+public abstract class AbstractTreeIndexOperatorDescriptor extends
+		AbstractSingleActivityOperatorDescriptor implements
+		ITreeIndexOperatorDescriptorHelper {
 
-    private static final long serialVersionUID = 1L;
+	private static final long serialVersionUID = 1L;
 
-    protected final IFileSplitProvider fileSplitProvider;
+	protected final IFileSplitProvider fileSplitProvider;
 
-    protected final IBinaryComparatorFactory[] comparatorFactories;
+	protected final IBinaryComparatorFactory[] comparatorFactories;
+	protected final IPrimitiveValueProviderFactory[] valueProviderFactories;
 
-    protected final ITreeIndexFrameFactory interiorFrameFactory;
-    protected final ITreeIndexFrameFactory leafFrameFactory;
+	protected final ITreeIndexFrameFactory interiorFrameFactory;
+	protected final ITreeIndexFrameFactory leafFrameFactory;
 
-    protected final IStorageManagerInterface storageManager;
-    protected final IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider;
+	protected final IStorageManagerInterface storageManager;
+	protected final IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider;
 
-    protected final ITypeTrait[] typeTraits;
+	protected final ITypeTrait[] typeTraits;
 
-    protected final ITreeIndexOpHelperFactory opHelperFactory;
+	protected final ITreeIndexOpHelperFactory opHelperFactory;
 
-    public AbstractTreeIndexOperatorDescriptor(JobSpecification spec, int inputArity, int outputArity,
-            RecordDescriptor recDesc, IStorageManagerInterface storageManager,
-            IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider, IFileSplitProvider fileSplitProvider,
-            ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory leafFrameFactory,
-            ITypeTrait[] typeTraits, IBinaryComparatorFactory[] comparatorFactories,
-            ITreeIndexOpHelperFactory opHelperFactory) {
-        super(spec, inputArity, outputArity);
-        this.fileSplitProvider = fileSplitProvider;
-        this.storageManager = storageManager;
-        this.treeIndexRegistryProvider = treeIndexRegistryProvider;
-        this.interiorFrameFactory = interiorFrameFactory;
-        this.leafFrameFactory = leafFrameFactory;
-        this.typeTraits = typeTraits;
-        this.comparatorFactories = comparatorFactories;
-        this.opHelperFactory = opHelperFactory;
-        if (outputArity > 0)
-            recordDescriptors[0] = recDesc;
-    }
+	public AbstractTreeIndexOperatorDescriptor(JobSpecification spec,
+			int inputArity, int outputArity, RecordDescriptor recDesc,
+			IStorageManagerInterface storageManager,
+			IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider,
+			IFileSplitProvider fileSplitProvider,
+			ITreeIndexFrameFactory interiorFrameFactory,
+			ITreeIndexFrameFactory leafFrameFactory, ITypeTrait[] typeTraits,
+			IBinaryComparatorFactory[] comparatorFactories,
+			IPrimitiveValueProviderFactory[] valueProviderFactories,
+			ITreeIndexOpHelperFactory opHelperFactory) {
+		super(spec, inputArity, outputArity);
+		this.fileSplitProvider = fileSplitProvider;
+		this.storageManager = storageManager;
+		this.treeIndexRegistryProvider = treeIndexRegistryProvider;
+		this.interiorFrameFactory = interiorFrameFactory;
+		this.leafFrameFactory = leafFrameFactory;
+		this.typeTraits = typeTraits;
+		this.comparatorFactories = comparatorFactories;
+		this.valueProviderFactories = valueProviderFactories;
+		this.opHelperFactory = opHelperFactory;
+		if (outputArity > 0)
+			recordDescriptors[0] = recDesc;
+	}
 
-    @Override
-    public IFileSplitProvider getTreeIndexFileSplitProvider() {
-        return fileSplitProvider;
-    }
+	@Override
+	public IFileSplitProvider getTreeIndexFileSplitProvider() {
+		return fileSplitProvider;
+	}
 
-    @Override
-    public IBinaryComparatorFactory[] getTreeIndexComparatorFactories() {
-        return comparatorFactories;
-    }
+	@Override
+	public IBinaryComparatorFactory[] getTreeIndexComparatorFactories() {
+		return comparatorFactories;
+	}
 
-    @Override
-    public ITypeTrait[] getTreeIndexTypeTraits() {
-        return typeTraits;
-    }
+	@Override
+	public ITypeTrait[] getTreeIndexTypeTraits() {
+		return typeTraits;
+	}
 
-    @Override
-    public ITreeIndexFrameFactory getTreeIndexInteriorFactory() {
-        return interiorFrameFactory;
-    }
+	@Override
+	public IPrimitiveValueProviderFactory[] getTreeIndexValueProviderFactories() {
+		return valueProviderFactories;
+	}
 
-    @Override
-    public ITreeIndexFrameFactory getTreeIndexLeafFactory() {
-        return leafFrameFactory;
-    }
+	@Override
+	public ITreeIndexFrameFactory getTreeIndexInteriorFactory() {
+		return interiorFrameFactory;
+	}
 
-    @Override
-    public IStorageManagerInterface getStorageManager() {
-        return storageManager;
-    }
+	@Override
+	public ITreeIndexFrameFactory getTreeIndexLeafFactory() {
+		return leafFrameFactory;
+	}
 
-    @Override
-    public IIndexRegistryProvider<ITreeIndex> getTreeIndexRegistryProvider() {
-        return treeIndexRegistryProvider;
-    }
+	@Override
+	public IStorageManagerInterface getStorageManager() {
+		return storageManager;
+	}
 
-    @Override
-    public RecordDescriptor getRecordDescriptor() {
-        return recordDescriptors[0];
-    }
+	@Override
+	public IIndexRegistryProvider<ITreeIndex> getTreeIndexRegistryProvider() {
+		return treeIndexRegistryProvider;
+	}
 
-    @Override
-    public ITreeIndexOpHelperFactory getTreeIndexOpHelperFactory() {
-        return opHelperFactory;
-    }
-}
+	@Override
+	public RecordDescriptor getRecordDescriptor() {
+		return recordDescriptors[0];
+	}
+
+	@Override
+	public ITreeIndexOpHelperFactory getTreeIndexOpHelperFactory() {
+		return opHelperFactory;
+	}
+}
\ No newline at end of file
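
With value-provider factories now carried by the descriptor, an R-tree operator helper could materialize the concrete providers from the new getter, while B-tree descriptors pass null for this argument (as in the constructors above). A sketch under those assumptions; the method is illustrative, not part of this patch:

    private static IPrimitiveValueProvider[] createValueProviders(ITreeIndexOperatorDescriptorHelper opDesc) {
        IPrimitiveValueProviderFactory[] factories = opDesc.getTreeIndexValueProviderFactories();
        if (factories == null) {
            // B-tree descriptors pass null, so there is nothing to materialize.
            return null;
        }
        IPrimitiveValueProvider[] providers = new IPrimitiveValueProvider[factories.length];
        for (int i = 0; i < factories.length; i++) {
            providers[i] = factories[i].createPrimitiveValueProvider();
        }
        return providers;
    }
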
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IIndexRegistryProvider.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IIndexRegistryProvider.java
index 7971a62..ed20de0 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IIndexRegistryProvider.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IIndexRegistryProvider.java
@@ -20,5 +20,5 @@
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 
 public interface IIndexRegistryProvider<IndexType> extends Serializable {
-    public IndexRegistry<IndexType> getRegistry(IHyracksTaskContext ctx);
+	public IndexRegistry<IndexType> getRegistry(IHyracksTaskContext ctx);
 }
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/ITreeIndexOpHelperFactory.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/ITreeIndexOpHelperFactory.java
index cb4d92d..1f734f4 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/ITreeIndexOpHelperFactory.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/ITreeIndexOpHelperFactory.java
@@ -5,6 +5,8 @@
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 
 public interface ITreeIndexOpHelperFactory extends Serializable {
-    public TreeIndexOpHelper createTreeIndexOpHelper(ITreeIndexOperatorDescriptorHelper opDesc,
-            final IHyracksTaskContext ctx, int partition, IndexHelperOpenMode mode);
+	public TreeIndexOpHelper createTreeIndexOpHelper(
+			ITreeIndexOperatorDescriptorHelper opDesc,
+			final IHyracksTaskContext ctx, int partition,
+			IndexHelperOpenMode mode);
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/ITreeIndexOperatorDescriptorHelper.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/ITreeIndexOperatorDescriptorHelper.java
index 9098a78..79ecb37 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/ITreeIndexOperatorDescriptorHelper.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/ITreeIndexOperatorDescriptorHelper.java
@@ -5,26 +5,29 @@
 import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
 
 public interface ITreeIndexOperatorDescriptorHelper extends IActivity {
-    public IFileSplitProvider getTreeIndexFileSplitProvider();
+	public IFileSplitProvider getTreeIndexFileSplitProvider();
 
-    public IBinaryComparatorFactory[] getTreeIndexComparatorFactories();
+	public IBinaryComparatorFactory[] getTreeIndexComparatorFactories();
 
-    public ITypeTrait[] getTreeIndexTypeTraits();
+	public IPrimitiveValueProviderFactory[] getTreeIndexValueProviderFactories();
 
-    public ITreeIndexFrameFactory getTreeIndexInteriorFactory();
+	public ITypeTrait[] getTreeIndexTypeTraits();
 
-    public ITreeIndexFrameFactory getTreeIndexLeafFactory();
+	public ITreeIndexFrameFactory getTreeIndexInteriorFactory();
 
-    public IStorageManagerInterface getStorageManager();
+	public ITreeIndexFrameFactory getTreeIndexLeafFactory();
 
-    public IIndexRegistryProvider<ITreeIndex> getTreeIndexRegistryProvider();
+	public IStorageManagerInterface getStorageManager();
 
-    public RecordDescriptor getRecordDescriptor();
+	public IIndexRegistryProvider<ITreeIndex> getTreeIndexRegistryProvider();
 
-    public ITreeIndexOpHelperFactory getTreeIndexOpHelperFactory();
-}
+	public RecordDescriptor getRecordDescriptor();
+
+	public ITreeIndexOpHelperFactory getTreeIndexOpHelperFactory();
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexHelperOpenMode.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexHelperOpenMode.java
index aa41184..0b27a0e 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexHelperOpenMode.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexHelperOpenMode.java
@@ -1,5 +1,5 @@
 package edu.uci.ics.hyracks.storage.am.common.dataflow;
 
 public enum IndexHelperOpenMode {
-    OPEN, CREATE, ENLIST
+	OPEN, CREATE, ENLIST
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexRegistry.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexRegistry.java
index de00d5a..df372f4c 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexRegistry.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexRegistry.java
@@ -21,33 +21,33 @@
 
 public class IndexRegistry<IndexType> {
 
-    private HashMap<Integer, IndexType> map = new HashMap<Integer, IndexType>();
-    private Lock registryLock = new ReentrantLock();
+	private HashMap<Integer, IndexType> map = new HashMap<Integer, IndexType>();
+	private Lock registryLock = new ReentrantLock();
 
-    public IndexType get(int fileId) {
-        return map.get(fileId);
-    }
+	public IndexType get(int fileId) {
+		return map.get(fileId);
+	}
 
-    public void lock() {
-        registryLock.lock();
-    }
+	public void lock() {
+		registryLock.lock();
+	}
 
-    public void unlock() {
-        registryLock.unlock();
-    }
+	public void unlock() {
+		registryLock.unlock();
+	}
 
-    public void register(int fileId, IndexType index) {
-        map.put(fileId, index);
-    }
+	public void register(int fileId, IndexType index) {
+		map.put(fileId, index);
+	}
 
-    public void unregister(int fileId) {
-        try {
-            map.remove(fileId);
-        } catch (Exception e) {
-        }
-    }
+	public void unregister(int fileId) {
+		try {
+			map.remove(fileId);
+		} catch (Exception e) {
+		}
+	}
 
-    public int size() {
-        return map.size();
-    }
+	public int size() {
+		return map.size();
+	}
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/PermutingFrameTupleReference.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/PermutingFrameTupleReference.java
index 3db9db2..0b296f0 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/PermutingFrameTupleReference.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/PermutingFrameTupleReference.java
@@ -19,47 +19,47 @@
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
 
 public class PermutingFrameTupleReference implements IFrameTupleReference {
-    private IFrameTupleAccessor fta;
-    private int tIndex;
-    private int[] fieldPermutation;
+	private IFrameTupleAccessor fta;
+	private int tIndex;
+	private int[] fieldPermutation;
 
-    public void setFieldPermutation(int[] fieldPermutation) {
-        this.fieldPermutation = fieldPermutation;
-    }
+	public void setFieldPermutation(int[] fieldPermutation) {
+		this.fieldPermutation = fieldPermutation;
+	}
 
-    public void reset(IFrameTupleAccessor fta, int tIndex) {
-        this.fta = fta;
-        this.tIndex = tIndex;
-    }
+	public void reset(IFrameTupleAccessor fta, int tIndex) {
+		this.fta = fta;
+		this.tIndex = tIndex;
+	}
 
-    @Override
-    public IFrameTupleAccessor getFrameTupleAccessor() {
-        return fta;
-    }
+	@Override
+	public IFrameTupleAccessor getFrameTupleAccessor() {
+		return fta;
+	}
 
-    @Override
-    public int getTupleIndex() {
-        return tIndex;
-    }
+	@Override
+	public int getTupleIndex() {
+		return tIndex;
+	}
 
-    @Override
-    public int getFieldCount() {
-        return fieldPermutation.length;
-    }
+	@Override
+	public int getFieldCount() {
+		return fieldPermutation.length;
+	}
 
-    @Override
-    public byte[] getFieldData(int fIdx) {
-        return fta.getBuffer().array();
-    }
+	@Override
+	public byte[] getFieldData(int fIdx) {
+		return fta.getBuffer().array();
+	}
 
-    @Override
-    public int getFieldStart(int fIdx) {
-        return fta.getTupleStartOffset(tIndex) + fta.getFieldSlotsLength()
-                + fta.getFieldStartOffset(tIndex, fieldPermutation[fIdx]);
-    }
+	@Override
+	public int getFieldStart(int fIdx) {
+		return fta.getTupleStartOffset(tIndex) + fta.getFieldSlotsLength()
+				+ fta.getFieldStartOffset(tIndex, fieldPermutation[fIdx]);
+	}
 
-    @Override
-    public int getFieldLength(int fIdx) {
-        return fta.getFieldLength(tIndex, fieldPermutation[fIdx]);
-    }
+	@Override
+	public int getFieldLength(int fIdx) {
+		return fta.getFieldLength(tIndex, fieldPermutation[fIdx]);
+	}
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorDescriptor.java
index fc12f59..5076c56 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorDescriptor.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorDescriptor.java
@@ -23,32 +23,43 @@
 import edu.uci.ics.hyracks.api.job.IOperatorEnvironment;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
 
-public class TreeIndexBulkLoadOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
+public class TreeIndexBulkLoadOperatorDescriptor extends
+		AbstractTreeIndexOperatorDescriptor {
 
-    private static final long serialVersionUID = 1L;
+	private static final long serialVersionUID = 1L;
 
-    private final int[] fieldPermutation;
-    private final float fillFactor;
+	private final int[] fieldPermutation;
+	private final float fillFactor;
 
-    public TreeIndexBulkLoadOperatorDescriptor(JobSpecification spec, IStorageManagerInterface storageManager,
-            IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider, IFileSplitProvider fileSplitProvider,
-            ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory leafFrameFactory,
-            ITypeTrait[] typeTraits, IBinaryComparatorFactory[] comparatorFactories, int[] fieldPermutation,
-            float fillFactor, ITreeIndexOpHelperFactory opHelperFactory) {
-        super(spec, 1, 0, null, storageManager, treeIndexRegistryProvider, fileSplitProvider, interiorFrameFactory,
-                leafFrameFactory, typeTraits, comparatorFactories, opHelperFactory);
-        this.fieldPermutation = fieldPermutation;
-        this.fillFactor = fillFactor;
-    }
+	public TreeIndexBulkLoadOperatorDescriptor(JobSpecification spec,
+			IStorageManagerInterface storageManager,
+			IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider,
+			IFileSplitProvider fileSplitProvider,
+			ITreeIndexFrameFactory interiorFrameFactory,
+			ITreeIndexFrameFactory leafFrameFactory, ITypeTrait[] typeTraits,
+			IBinaryComparatorFactory[] comparatorFactories,
+			IPrimitiveValueProviderFactory[] valueProviderFactories,
+			int[] fieldPermutation, float fillFactor,
+			ITreeIndexOpHelperFactory opHelperFactory) {
+		super(spec, 1, 0, null, storageManager, treeIndexRegistryProvider,
+				fileSplitProvider, interiorFrameFactory, leafFrameFactory,
+				typeTraits, comparatorFactories, valueProviderFactories,
+				opHelperFactory);
+		this.fieldPermutation = fieldPermutation;
+		this.fillFactor = fillFactor;
+	}
 
-    @Override
-    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx, IOperatorEnvironment env,
-            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
-        return new TreeIndexBulkLoadOperatorNodePushable(this, ctx, partition, fieldPermutation, fillFactor,
-                recordDescProvider);
-    }
-}
+	@Override
+	public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+			IOperatorEnvironment env,
+			IRecordDescriptorProvider recordDescProvider, int partition,
+			int nPartitions) {
+		return new TreeIndexBulkLoadOperatorNodePushable(this, ctx, partition,
+				fieldPermutation, fillFactor, recordDescProvider);
+	}
+}
\ No newline at end of file
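Editor's note: the bulk-load descriptor now takes an IPrimitiveValueProviderFactory[] and forwards it to the base descriptor. A hedged sketch of a caller adapting to the new argument order follows; every input (spec, storageManager, the factories, the permutation) is assumed to be built elsewhere in the job, imports as in the descriptor above, and a plain B-tree load would presumably pass null for the value providers since only the r-tree reads actual key values:

// Sketch only: constructing the bulk-load descriptor with the new argument
// order. All inputs are assumed to come from the surrounding job setup; the
// fill factor of 0.7f is an arbitrary illustrative value.
TreeIndexBulkLoadOperatorDescriptor bulkLoad = new TreeIndexBulkLoadOperatorDescriptor(
        spec, storageManager, treeIndexRegistryProvider, fileSplitProvider,
        interiorFrameFactory, leafFrameFactory, typeTraits, comparatorFactories,
        null /* valueProviderFactories: only the r-tree needs them */,
        fieldPermutation, 0.7f /* fill factor */, opHelperFactory);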
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorNodePushable.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorNodePushable.java
index aced388..42dfaab 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorNodePushable.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorNodePushable.java
@@ -26,64 +26,73 @@
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
 import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrame;
 
-public class TreeIndexBulkLoadOperatorNodePushable extends AbstractUnaryInputSinkOperatorNodePushable {
-    private float fillFactor;
-    private final TreeIndexOpHelper treeIndexOpHelper;
-    private FrameTupleAccessor accessor;
-    private IIndexBulkLoadContext bulkLoadCtx;
+public class TreeIndexBulkLoadOperatorNodePushable extends
+		AbstractUnaryInputSinkOperatorNodePushable {
+	private float fillFactor;
+	private final TreeIndexOpHelper treeIndexOpHelper;
+	private FrameTupleAccessor accessor;
+	private IIndexBulkLoadContext bulkLoadCtx;
 
-    private IRecordDescriptorProvider recordDescProvider;
+	private IRecordDescriptorProvider recordDescProvider;
 
-    private PermutingFrameTupleReference tuple = new PermutingFrameTupleReference();
+	private PermutingFrameTupleReference tuple = new PermutingFrameTupleReference();
 
-    public TreeIndexBulkLoadOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
-            int partition, int[] fieldPermutation, float fillFactor, IRecordDescriptorProvider recordDescProvider) {
-        treeIndexOpHelper = opDesc.getTreeIndexOpHelperFactory().createTreeIndexOpHelper(opDesc, ctx, partition,
-                IndexHelperOpenMode.CREATE);
-        this.fillFactor = fillFactor;
-        this.recordDescProvider = recordDescProvider;
-        tuple.setFieldPermutation(fieldPermutation);
-    }
+	public TreeIndexBulkLoadOperatorNodePushable(
+			AbstractTreeIndexOperatorDescriptor opDesc,
+			IHyracksTaskContext ctx, int partition, int[] fieldPermutation,
+			float fillFactor, IRecordDescriptorProvider recordDescProvider) {
+		treeIndexOpHelper = opDesc.getTreeIndexOpHelperFactory()
+				.createTreeIndexOpHelper(opDesc, ctx, partition,
+						IndexHelperOpenMode.CREATE);
+		this.fillFactor = fillFactor;
+		this.recordDescProvider = recordDescProvider;
+		tuple.setFieldPermutation(fieldPermutation);
+	}
 
-    @Override
-    public void open() throws HyracksDataException {
-        AbstractTreeIndexOperatorDescriptor opDesc = (AbstractTreeIndexOperatorDescriptor) treeIndexOpHelper
-                .getOperatorDescriptor();
-        RecordDescriptor recDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getOperatorId(), 0);
-        accessor = new FrameTupleAccessor(treeIndexOpHelper.getHyracksTaskContext().getFrameSize(), recDesc);
-        ITreeIndexMetaDataFrame metaFrame = new LIFOMetaDataFrame();
-        try {
-            treeIndexOpHelper.init();
-            treeIndexOpHelper.getTreeIndex().open(treeIndexOpHelper.getIndexFileId());
-            bulkLoadCtx = treeIndexOpHelper.getTreeIndex().beginBulkLoad(fillFactor, treeIndexOpHelper.getLeafFrame(),
-                    treeIndexOpHelper.getInteriorFrame(), metaFrame);
-        } catch (Exception e) {
-            // cleanup in case of failure
-            treeIndexOpHelper.deinit();
-            throw new HyracksDataException(e);
-        }
-    }
+	@Override
+	public void open() throws HyracksDataException {
+		AbstractTreeIndexOperatorDescriptor opDesc = (AbstractTreeIndexOperatorDescriptor) treeIndexOpHelper
+				.getOperatorDescriptor();
+		RecordDescriptor recDesc = recordDescProvider.getInputRecordDescriptor(
+				opDesc.getOperatorId(), 0);
+		accessor = new FrameTupleAccessor(treeIndexOpHelper
+				.getHyracksTaskContext().getFrameSize(), recDesc);
+		ITreeIndexMetaDataFrame metaFrame = new LIFOMetaDataFrame();
+		try {
+			treeIndexOpHelper.init();
+			treeIndexOpHelper.getTreeIndex().open(
+					treeIndexOpHelper.getIndexFileId());
+			bulkLoadCtx = treeIndexOpHelper.getTreeIndex().beginBulkLoad(
+					fillFactor, treeIndexOpHelper.getLeafFrame(),
+					treeIndexOpHelper.getInteriorFrame(), metaFrame);
+		} catch (Exception e) {
+			// cleanup in case of failure
+			treeIndexOpHelper.deinit();
+			throw new HyracksDataException(e);
+		}
+	}
 
-    @Override
-    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
-        accessor.reset(buffer);
-        int tupleCount = accessor.getTupleCount();
-        for (int i = 0; i < tupleCount; i++) {
-            tuple.reset(accessor, i);
-            treeIndexOpHelper.getTreeIndex().bulkLoadAddTuple(bulkLoadCtx, tuple);
-        }
-    }
+	@Override
+	public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+		accessor.reset(buffer);
+		int tupleCount = accessor.getTupleCount();
+		for (int i = 0; i < tupleCount; i++) {
+			tuple.reset(accessor, i);
+			treeIndexOpHelper.getTreeIndex().bulkLoadAddTuple(bulkLoadCtx,
+					tuple);
+		}
+	}
 
-    @Override
-    public void close() throws HyracksDataException {
-        try {
-            treeIndexOpHelper.getTreeIndex().endBulkLoad(bulkLoadCtx);
-        } finally {
-            treeIndexOpHelper.deinit();
-        }
-    }
+	@Override
+	public void close() throws HyracksDataException {
+		try {
+			treeIndexOpHelper.getTreeIndex().endBulkLoad(bulkLoadCtx);
+		} finally {
+			treeIndexOpHelper.deinit();
+		}
+	}
 
-    @Override
-    public void fail() throws HyracksDataException {
-    }
+	@Override
+	public void fail() throws HyracksDataException {
+	}
 }
\ No newline at end of file
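Editor's note: the pushable's lifecycle above is: open() initializes the helper and begins the bulk load (deinit on failure), nextFrame() adds every tuple of each incoming frame, and close() ends the load and always deinits the helper. A standalone sketch of that cleanup discipline, with plain Java types standing in for the Hyracks ones (names illustrative):

import java.util.ArrayList;
import java.util.List;

// Standalone sketch of the open / nextFrame / close discipline used by the
// bulk-load pushable: initialize, clean up on failure, add tuples per frame,
// always release the helper on close.
public class BulkLoadLifecycleSketch {
    interface IndexHelper {
        void init() throws Exception;
        void deinit();
    }

    private final IndexHelper helper;
    private final List<String> loaded = new ArrayList<>();

    public BulkLoadLifecycleSketch(IndexHelper helper) {
        this.helper = helper;
    }

    public void open() throws Exception {
        try {
            helper.init();        // open the index ("beginBulkLoad" in the real operator)
        } catch (Exception e) {
            helper.deinit();      // cleanup in case of failure, as in open() above
            throw e;
        }
    }

    public void nextFrame(List<String> frame) {
        loaded.addAll(frame);     // add every tuple of the incoming frame
    }

    public void close() {
        try {
            System.out.println("loaded " + loaded.size() + " tuples"); // "endBulkLoad"
        } finally {
            helper.deinit();      // always release, mirroring close() above
        }
    }
}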
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorDescriptor.java
index 3a6eb62..5fcb49a 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorDescriptor.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorDescriptor.java
@@ -27,21 +27,29 @@
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
 
-public class TreeIndexDiskOrderScanOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
+public class TreeIndexDiskOrderScanOperatorDescriptor extends
+		AbstractTreeIndexOperatorDescriptor {
 
-    private static final long serialVersionUID = 1L;
+	private static final long serialVersionUID = 1L;
 
-    public TreeIndexDiskOrderScanOperatorDescriptor(JobSpecification spec, RecordDescriptor recDesc,
-            IStorageManagerInterface storageManager, IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider,
-            IFileSplitProvider fileSplitProvider, ITreeIndexFrameFactory interiorFrameFactory,
-            ITreeIndexFrameFactory leafFrameFactory, ITypeTrait[] typeTraits, ITreeIndexOpHelperFactory opHelperFactory) {
-        super(spec, 0, 1, recDesc, storageManager, treeIndexRegistryProvider, fileSplitProvider, interiorFrameFactory,
-                leafFrameFactory, typeTraits, null, opHelperFactory);
-    }
+	public TreeIndexDiskOrderScanOperatorDescriptor(JobSpecification spec,
+			RecordDescriptor recDesc, IStorageManagerInterface storageManager,
+			IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider,
+			IFileSplitProvider fileSplitProvider,
+			ITreeIndexFrameFactory interiorFrameFactory,
+			ITreeIndexFrameFactory leafFrameFactory, ITypeTrait[] typeTraits,
+			ITreeIndexOpHelperFactory opHelperFactory) {
+		super(spec, 0, 1, recDesc, storageManager, treeIndexRegistryProvider,
+				fileSplitProvider, interiorFrameFactory, leafFrameFactory,
+				typeTraits, null, null, opHelperFactory);
+	}
 
-    @Override
-    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx, IOperatorEnvironment env,
-            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
-        return new TreeIndexDiskOrderScanOperatorNodePushable(this, ctx, partition);
-    }
+	@Override
+	public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+			IOperatorEnvironment env,
+			IRecordDescriptorProvider recordDescProvider, int partition,
+			int nPartitions) {
+		return new TreeIndexDiskOrderScanOperatorNodePushable(this, ctx,
+				partition);
+	}
 }
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorNodePushable.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorNodePushable.java
index 2703137..0b22148 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorNodePushable.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorNodePushable.java
@@ -31,77 +31,91 @@
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOpContext;
 
-public class TreeIndexDiskOrderScanOperatorNodePushable extends AbstractUnaryOutputSourceOperatorNodePushable {
-    private final TreeIndexOpHelper treeIndexOpHelper;
+public class TreeIndexDiskOrderScanOperatorNodePushable extends
+		AbstractUnaryOutputSourceOperatorNodePushable {
+	private final TreeIndexOpHelper treeIndexOpHelper;
 
-    public TreeIndexDiskOrderScanOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
-            IHyracksTaskContext ctx, int partition) {
-        treeIndexOpHelper = opDesc.getTreeIndexOpHelperFactory().createTreeIndexOpHelper(opDesc, ctx, partition,
-                IndexHelperOpenMode.OPEN);
-    }
+	public TreeIndexDiskOrderScanOperatorNodePushable(
+			AbstractTreeIndexOperatorDescriptor opDesc,
+			IHyracksTaskContext ctx, int partition) {
+		treeIndexOpHelper = opDesc.getTreeIndexOpHelperFactory()
+				.createTreeIndexOpHelper(opDesc, ctx, partition,
+						IndexHelperOpenMode.OPEN);
+	}
 
-    @Override
-    public void initialize() throws HyracksDataException {
+	@Override
+	public void initialize() throws HyracksDataException {
 
-        ITreeIndexFrame cursorFrame = treeIndexOpHelper.getOperatorDescriptor().getTreeIndexLeafFactory().createFrame();
-        ITreeIndexCursor cursor = treeIndexOpHelper.createDiskOrderScanCursor(cursorFrame);
-        ITreeIndexMetaDataFrame metaFrame = new LIFOMetaDataFrame();
+		ITreeIndexFrame cursorFrame = treeIndexOpHelper.getOperatorDescriptor()
+				.getTreeIndexLeafFactory().createFrame();
+		ITreeIndexCursor cursor = treeIndexOpHelper
+				.createDiskOrderScanCursor(cursorFrame);
+		ITreeIndexMetaDataFrame metaFrame = new LIFOMetaDataFrame();
 
-        IndexOpContext diskOrderScanOpCtx = treeIndexOpHelper.getTreeIndex().createOpContext(IndexOp.DISKORDERSCAN,
-                cursorFrame, null, null);
-        try {
+		IndexOpContext diskOrderScanOpCtx = treeIndexOpHelper
+				.getTreeIndex()
+				.createOpContext(IndexOp.DISKORDERSCAN, cursorFrame, null, null);
+		try {
 
-            treeIndexOpHelper.init();
-            writer.open();
-            try {
-                treeIndexOpHelper.getTreeIndex().diskOrderScan(cursor, cursorFrame, metaFrame, diskOrderScanOpCtx);
+			treeIndexOpHelper.init();
+			writer.open();
+			try {
+				treeIndexOpHelper.getTreeIndex().diskOrderScan(cursor,
+						cursorFrame, metaFrame, diskOrderScanOpCtx);
 
-                int fieldCount = treeIndexOpHelper.getTreeIndex().getFieldCount();
-                ByteBuffer frame = treeIndexOpHelper.getHyracksTaskContext().allocateFrame();
-                FrameTupleAppender appender = new FrameTupleAppender(treeIndexOpHelper.getHyracksTaskContext()
-                        .getFrameSize());
-                appender.reset(frame, true);
-                ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
-                DataOutput dos = tb.getDataOutput();
+				int fieldCount = treeIndexOpHelper.getTreeIndex()
+						.getFieldCount();
+				ByteBuffer frame = treeIndexOpHelper.getHyracksTaskContext()
+						.allocateFrame();
+				FrameTupleAppender appender = new FrameTupleAppender(
+						treeIndexOpHelper.getHyracksTaskContext()
+								.getFrameSize());
+				appender.reset(frame, true);
+				ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+				DataOutput dos = tb.getDataOutput();
 
-                while (cursor.hasNext()) {
-                    tb.reset();
-                    cursor.next();
+				while (cursor.hasNext()) {
+					tb.reset();
+					cursor.next();
 
-                    ITupleReference frameTuple = cursor.getTuple();
-                    for (int i = 0; i < frameTuple.getFieldCount(); i++) {
-                        dos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
-                        tb.addFieldEndOffset();
-                    }
+					ITupleReference frameTuple = cursor.getTuple();
+					for (int i = 0; i < frameTuple.getFieldCount(); i++) {
+						dos.write(frameTuple.getFieldData(i),
+								frameTuple.getFieldStart(i),
+								frameTuple.getFieldLength(i));
+						tb.addFieldEndOffset();
+					}
 
-                    if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
-                        FrameUtils.flushFrame(frame, writer);
-                        appender.reset(frame, true);
-                        if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
-                            throw new IllegalStateException();
-                        }
-                    }
-                }
+					if (!appender.append(tb.getFieldEndOffsets(),
+							tb.getByteArray(), 0, tb.getSize())) {
+						FrameUtils.flushFrame(frame, writer);
+						appender.reset(frame, true);
+						if (!appender.append(tb.getFieldEndOffsets(),
+								tb.getByteArray(), 0, tb.getSize())) {
+							throw new IllegalStateException();
+						}
+					}
+				}
 
-                if (appender.getTupleCount() > 0) {
-                    FrameUtils.flushFrame(frame, writer);
-                }
-            } catch (Exception e) {
-                writer.fail();
-                throw new HyracksDataException(e);
-            } finally {
-                cursor.close();
-                writer.close();
-            }
+				if (appender.getTupleCount() > 0) {
+					FrameUtils.flushFrame(frame, writer);
+				}
+			} catch (Exception e) {
+				writer.fail();
+				throw new HyracksDataException(e);
+			} finally {
+				cursor.close();
+				writer.close();
+			}
 
-        } catch (Exception e) {
-            deinitialize();
-            throw new HyracksDataException(e);
-        }
-    }
+		} catch (Exception e) {
+			deinitialize();
+			throw new HyracksDataException(e);
+		}
+	}
 
-    @Override
-    public void deinitialize() throws HyracksDataException {
-        treeIndexOpHelper.deinit();
-    }
+	@Override
+	public void deinitialize() throws HyracksDataException {
+		treeIndexOpHelper.deinit();
+	}
 }
\ No newline at end of file
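Editor's note: the disk-order scan writes each cursor tuple into an output frame through a tuple builder and appender; when the appender has no room, the frame is flushed and the tuple re-appended, and a tuple that still does not fit is an error. A standalone sketch of that append-or-flush pattern using a plain fixed-capacity buffer (no Hyracks types; names illustrative):

import java.util.ArrayList;
import java.util.List;

// Standalone sketch of the append / flush-and-retry pattern used by the
// disk-order scan above. A "frame" is modeled as a fixed-capacity list.
public class AppendOrFlushSketch {
    private final int frameCapacity;
    private final List<String> frame = new ArrayList<>();

    public AppendOrFlushSketch(int frameCapacity) {
        this.frameCapacity = frameCapacity;
    }

    private boolean append(String tuple) {
        if (frame.size() >= frameCapacity) {
            return false; // frame full, caller must flush first
        }
        frame.add(tuple);
        return true;
    }

    private void flush() {
        System.out.println("flushing frame: " + frame); // FrameUtils.flushFrame(...) in the operator
        frame.clear();
    }

    public void write(String tuple) {
        if (!append(tuple)) {
            flush();
            if (!append(tuple)) {
                throw new IllegalStateException("tuple larger than a frame");
            }
        }
    }

    public void finish() {
        if (!frame.isEmpty()) {
            flush(); // flush the last, partially filled frame
        }
    }

    public static void main(String[] args) {
        AppendOrFlushSketch writer = new AppendOrFlushSketch(2);
        for (String t : new String[] { "a", "b", "c" }) {
            writer.write(t);
        }
        writer.finish();
    }
}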
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDropOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDropOperatorDescriptor.java
index 2b1de29..0053877 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDropOperatorDescriptor.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDropOperatorDescriptor.java
@@ -25,26 +25,31 @@
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
 
-public class TreeIndexDropOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+public class TreeIndexDropOperatorDescriptor extends
+		AbstractSingleActivityOperatorDescriptor {
 
-    private static final long serialVersionUID = 1L;
+	private static final long serialVersionUID = 1L;
 
-    private IStorageManagerInterface storageManager;
-    private IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider;
-    private IFileSplitProvider fileSplitProvider;
+	private IStorageManagerInterface storageManager;
+	private IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider;
+	private IFileSplitProvider fileSplitProvider;
 
-    public TreeIndexDropOperatorDescriptor(JobSpecification spec, IStorageManagerInterface storageManager,
-            IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider, IFileSplitProvider fileSplitProvider) {
-        super(spec, 0, 0);
-        this.storageManager = storageManager;
-        this.treeIndexRegistryProvider = treeIndexRegistryProvider;
-        this.fileSplitProvider = fileSplitProvider;
-    }
+	public TreeIndexDropOperatorDescriptor(JobSpecification spec,
+			IStorageManagerInterface storageManager,
+			IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider,
+			IFileSplitProvider fileSplitProvider) {
+		super(spec, 0, 0);
+		this.storageManager = storageManager;
+		this.treeIndexRegistryProvider = treeIndexRegistryProvider;
+		this.fileSplitProvider = fileSplitProvider;
+	}
 
-    @Override
-    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx, IOperatorEnvironment env,
-            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
-        return new TreeIndexDropOperatorNodePushable(ctx, storageManager, treeIndexRegistryProvider, fileSplitProvider,
-                partition);
-    }
+	@Override
+	public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+			IOperatorEnvironment env,
+			IRecordDescriptorProvider recordDescProvider, int partition,
+			int nPartitions) {
+		return new TreeIndexDropOperatorNodePushable(ctx, storageManager,
+				treeIndexRegistryProvider, fileSplitProvider, partition);
+	}
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDropOperatorNodePushable.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDropOperatorNodePushable.java
index 9b4ecb0..6614fba 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDropOperatorNodePushable.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDropOperatorNodePushable.java
@@ -30,79 +30,87 @@
 import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
 import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
 
-public class TreeIndexDropOperatorNodePushable extends AbstractOperatorNodePushable {
-    private static final Logger LOGGER = Logger.getLogger(TreeIndexDropOperatorNodePushable.class.getName());
+public class TreeIndexDropOperatorNodePushable extends
+		AbstractOperatorNodePushable {
+	private static final Logger LOGGER = Logger
+			.getLogger(TreeIndexDropOperatorNodePushable.class.getName());
 
-    private final IHyracksTaskContext ctx;
-    private IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider;
-    private IStorageManagerInterface storageManager;
-    private IFileSplitProvider fileSplitProvider;
-    private int partition;
+	private final IHyracksTaskContext ctx;
+	private IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider;
+	private IStorageManagerInterface storageManager;
+	private IFileSplitProvider fileSplitProvider;
+	private int partition;
 
-    public TreeIndexDropOperatorNodePushable(IHyracksTaskContext ctx, IStorageManagerInterface storageManager,
-            IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider, IFileSplitProvider fileSplitProvider,
-            int partition) {
-        this.ctx = ctx;
-        this.storageManager = storageManager;
-        this.treeIndexRegistryProvider = treeIndexRegistryProvider;
-        this.fileSplitProvider = fileSplitProvider;
-        this.partition = partition;
-    }
+	public TreeIndexDropOperatorNodePushable(IHyracksTaskContext ctx,
+			IStorageManagerInterface storageManager,
+			IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider,
+			IFileSplitProvider fileSplitProvider, int partition) {
+		this.ctx = ctx;
+		this.storageManager = storageManager;
+		this.treeIndexRegistryProvider = treeIndexRegistryProvider;
+		this.fileSplitProvider = fileSplitProvider;
+		this.partition = partition;
+	}
 
-    @Override
-    public void deinitialize() throws HyracksDataException {
-    }
+	@Override
+	public void deinitialize() throws HyracksDataException {
+	}
 
-    @Override
-    public int getInputArity() {
-        return 0;
-    }
+	@Override
+	public int getInputArity() {
+		return 0;
+	}
 
-    @Override
-    public IFrameWriter getInputFrameWriter(int index) {
-        return null;
-    }
+	@Override
+	public IFrameWriter getInputFrameWriter(int index) {
+		return null;
+	}
 
-    @Override
-    public void initialize() throws HyracksDataException {
-        try {
+	@Override
+	public void initialize() throws HyracksDataException {
+		try {
 
-            IndexRegistry<ITreeIndex> treeIndexRegistry = treeIndexRegistryProvider.getRegistry(ctx);
-            IBufferCache bufferCache = storageManager.getBufferCache(ctx);
-            IFileMapProvider fileMapProvider = storageManager.getFileMapProvider(ctx);
+			IndexRegistry<ITreeIndex> treeIndexRegistry = treeIndexRegistryProvider
+					.getRegistry(ctx);
+			IBufferCache bufferCache = storageManager.getBufferCache(ctx);
+			IFileMapProvider fileMapProvider = storageManager
+					.getFileMapProvider(ctx);
 
-            FileReference f = fileSplitProvider.getFileSplits()[partition].getLocalFile();
+			FileReference f = fileSplitProvider.getFileSplits()[partition]
+					.getLocalFile();
 
-            boolean fileIsMapped = fileMapProvider.isMapped(f);
-            if (!fileIsMapped) {
-                throw new HyracksDataException("Cannot drop Tree with name " + f.toString()
-                        + ". No file mapping exists.");
-            }
+			boolean fileIsMapped = fileMapProvider.isMapped(f);
+			if (!fileIsMapped) {
+				throw new HyracksDataException("Cannot drop Tree with name "
+						+ f.toString() + ". No file mapping exists.");
+			}
 
-            int indexFileId = fileMapProvider.lookupFileId(f);
+			int indexFileId = fileMapProvider.lookupFileId(f);
 
-            // unregister tree instance
-            treeIndexRegistry.lock();
-            try {
-                treeIndexRegistry.unregister(indexFileId);
-            } finally {
-                treeIndexRegistry.unlock();
-            }
+			// unregister tree instance
+			treeIndexRegistry.lock();
+			try {
+				treeIndexRegistry.unregister(indexFileId);
+			} finally {
+				treeIndexRegistry.unlock();
+			}
 
-            // remove name to id mapping
-            bufferCache.deleteFile(indexFileId);
-        }
-        // TODO: for the time being we don't throw,
-        // with proper exception handling (no hanging job problem) we should
-        // throw
-        catch (Exception e) {
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Tree Drop Operator Failed Due To Exception: " + e.getMessage());
-            }
-        }
-    }
+			// remove name to id mapping
+			bufferCache.deleteFile(indexFileId);
+		}
+		// TODO: for the time being we don't throw; once proper exception
+		// handling avoids the hanging-job problem, we should rethrow here.
+		catch (Exception e) {
+			if (LOGGER.isLoggable(Level.WARNING)) {
+				LOGGER.warning("Tree Drop Operator Failed Due To Exception: "
+						+ e.getMessage());
+			}
+		}
+	}
 
-    @Override
-    public void setOutputFrameWriter(int index, IFrameWriter writer, RecordDescriptor recordDesc) {
-    }
+	@Override
+	public void setOutputFrameWriter(int index, IFrameWriter writer,
+			RecordDescriptor recordDesc) {
+	}
 }
\ No newline at end of file
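Editor's note: dropping an index here means verifying the file is mapped, looking up its file id, unregistering the tree instance from the registry while holding the registry lock, and then deleting the file from the buffer cache; failures are currently only logged. A standalone sketch of the unregister-under-lock step, modeling the registry as a plain map plus a lock (names illustrative):

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.locks.ReentrantLock;

// Standalone sketch of the "unregister under the registry lock" step in the
// drop operator above.
public class RegistrySketch<T> {
    private final Map<Integer, T> entries = new HashMap<>();
    private final ReentrantLock registryLock = new ReentrantLock();

    public void lock() { registryLock.lock(); }
    public void unlock() { registryLock.unlock(); }

    public void register(int fileId, T index) { entries.put(fileId, index); }
    public void unregister(int fileId) { entries.remove(fileId); }
    public T get(int fileId) { return entries.get(fileId); }

    public static void main(String[] args) {
        RegistrySketch<String> registry = new RegistrySketch<>();
        registry.register(42, "some tree index");
        // mirror of the drop operator: take the lock, unregister, always unlock
        registry.lock();
        try {
            registry.unregister(42);
        } finally {
            registry.unlock();
        }
    }
}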
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexFileEnlistmentOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexFileEnlistmentOperatorDescriptor.java
index 7812198..60fa8cc 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexFileEnlistmentOperatorDescriptor.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexFileEnlistmentOperatorDescriptor.java
@@ -25,6 +25,7 @@
 import edu.uci.ics.hyracks.api.job.IOperatorEnvironment;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
@@ -34,23 +35,33 @@
 // 2. create file mappings (FileMappingProvider)
 // 3. register tree index instance (IndexRegistry)
 
-public class TreeIndexFileEnlistmentOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
+public class TreeIndexFileEnlistmentOperatorDescriptor extends
+		AbstractTreeIndexOperatorDescriptor {
 
-    private static final long serialVersionUID = 1L;
+	private static final long serialVersionUID = 1L;
 
-    public TreeIndexFileEnlistmentOperatorDescriptor(JobSpecification spec, RecordDescriptor recDesc,
-            IStorageManagerInterface storageManager, IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider,
-            IFileSplitProvider fileSplitProvider, ITreeIndexFrameFactory interiorFrameFactory,
-            ITreeIndexFrameFactory leafFrameFactory, ITypeTrait[] typeTraits,
-            IBinaryComparatorFactory[] comparatorFactories, ITreeIndexOpHelperFactory opHelperFactory) {
-        super(spec, 0, 0, recDesc, storageManager, treeIndexRegistryProvider, fileSplitProvider, interiorFrameFactory,
-                leafFrameFactory, typeTraits, comparatorFactories, opHelperFactory);
-    }
+	public TreeIndexFileEnlistmentOperatorDescriptor(JobSpecification spec,
+			RecordDescriptor recDesc, IStorageManagerInterface storageManager,
+			IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider,
+			IFileSplitProvider fileSplitProvider,
+			ITreeIndexFrameFactory interiorFrameFactory,
+			ITreeIndexFrameFactory leafFrameFactory, ITypeTrait[] typeTraits,
+			IBinaryComparatorFactory[] comparatorFactories,
+			IPrimitiveValueProviderFactory[] valueProviderFactories,
+			ITreeIndexOpHelperFactory opHelperFactory) {
+		super(spec, 0, 0, recDesc, storageManager, treeIndexRegistryProvider,
+				fileSplitProvider, interiorFrameFactory, leafFrameFactory,
+				typeTraits, comparatorFactories, valueProviderFactories,
+				opHelperFactory);
+	}
 
-    @Override
-    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx, IOperatorEnvironment env,
-            IRecordDescriptorProvider recordDescProvider, int partition, int partitions) throws HyracksDataException {
-        return new TreeIndexFileEnlistmentOperatorNodePushable(this, ctx, partition);
-    }
+	@Override
+	public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+			IOperatorEnvironment env,
+			IRecordDescriptorProvider recordDescProvider, int partition,
+			int partitions) throws HyracksDataException {
+		return new TreeIndexFileEnlistmentOperatorNodePushable(this, ctx,
+				partition);
+	}
 
-}
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexFileEnlistmentOperatorNodePushable.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexFileEnlistmentOperatorNodePushable.java
index 9cbde53..124e2cc 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexFileEnlistmentOperatorNodePushable.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexFileEnlistmentOperatorNodePushable.java
@@ -21,40 +21,44 @@
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.dataflow.std.base.AbstractOperatorNodePushable;
 
-public class TreeIndexFileEnlistmentOperatorNodePushable extends AbstractOperatorNodePushable {
+public class TreeIndexFileEnlistmentOperatorNodePushable extends
+		AbstractOperatorNodePushable {
 
-    private final TreeIndexOpHelper treeIndexOpHelper;
+	private final TreeIndexOpHelper treeIndexOpHelper;
 
-    public TreeIndexFileEnlistmentOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
-            IHyracksTaskContext ctx, int partition) {
-        treeIndexOpHelper = opDesc.getTreeIndexOpHelperFactory().createTreeIndexOpHelper(opDesc, ctx, partition,
-                IndexHelperOpenMode.ENLIST);
-    }
+	public TreeIndexFileEnlistmentOperatorNodePushable(
+			AbstractTreeIndexOperatorDescriptor opDesc,
+			IHyracksTaskContext ctx, int partition) {
+		treeIndexOpHelper = opDesc.getTreeIndexOpHelperFactory()
+				.createTreeIndexOpHelper(opDesc, ctx, partition,
+						IndexHelperOpenMode.ENLIST);
+	}
 
-    @Override
-    public void deinitialize() throws HyracksDataException {
-    }
+	@Override
+	public void deinitialize() throws HyracksDataException {
+	}
 
-    @Override
-    public int getInputArity() {
-        return 0;
-    }
+	@Override
+	public int getInputArity() {
+		return 0;
+	}
 
-    @Override
-    public IFrameWriter getInputFrameWriter(int index) {
-        return null;
-    }
+	@Override
+	public IFrameWriter getInputFrameWriter(int index) {
+		return null;
+	}
 
-    @Override
-    public void initialize() throws HyracksDataException {
-        try {
-            treeIndexOpHelper.init();
-        } finally {
-            treeIndexOpHelper.deinit();
-        }
-    }
+	@Override
+	public void initialize() throws HyracksDataException {
+		try {
+			treeIndexOpHelper.init();
+		} finally {
+			treeIndexOpHelper.deinit();
+		}
+	}
 
-    @Override
-    public void setOutputFrameWriter(int index, IFrameWriter writer, RecordDescriptor recordDesc) {
-    }
+	@Override
+	public void setOutputFrameWriter(int index, IFrameWriter writer,
+			RecordDescriptor recordDesc) {
+	}
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorDescriptor.java
index 0457768..b41bd98 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorDescriptor.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorDescriptor.java
@@ -24,35 +24,45 @@
 import edu.uci.ics.hyracks.api.job.IOperatorEnvironment;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
 
-public class TreeIndexInsertUpdateDeleteOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
+public class TreeIndexInsertUpdateDeleteOperatorDescriptor extends
+		AbstractTreeIndexOperatorDescriptor {
 
-    private static final long serialVersionUID = 1L;
+	private static final long serialVersionUID = 1L;
 
-    private final int[] fieldPermutation;
+	private final int[] fieldPermutation;
 
-    private IndexOp op;
+	private IndexOp op;
 
-    public TreeIndexInsertUpdateDeleteOperatorDescriptor(JobSpecification spec, RecordDescriptor recDesc,
-            IStorageManagerInterface storageManager, IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider,
-            IFileSplitProvider fileSplitProvider, ITreeIndexFrameFactory interiorFrameFactory,
-            ITreeIndexFrameFactory leafFrameFactory, ITypeTrait[] typeTraits,
-            IBinaryComparatorFactory[] comparatorFactories, int[] fieldPermutation, IndexOp op,
-            ITreeIndexOpHelperFactory opHelperFactory) {
-        super(spec, 1, 1, recDesc, storageManager, treeIndexRegistryProvider, fileSplitProvider, interiorFrameFactory,
-                leafFrameFactory, typeTraits, comparatorFactories, opHelperFactory);
-        this.fieldPermutation = fieldPermutation;
-        this.op = op;
-    }
+	public TreeIndexInsertUpdateDeleteOperatorDescriptor(JobSpecification spec,
+			RecordDescriptor recDesc, IStorageManagerInterface storageManager,
+			IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider,
+			IFileSplitProvider fileSplitProvider,
+			ITreeIndexFrameFactory interiorFrameFactory,
+			ITreeIndexFrameFactory leafFrameFactory, ITypeTrait[] typeTraits,
+			IBinaryComparatorFactory[] comparatorFactories,
+			IPrimitiveValueProviderFactory[] valueProviderFactories,
+			int[] fieldPermutation, IndexOp op,
+			ITreeIndexOpHelperFactory opHelperFactory) {
+		super(spec, 1, 1, recDesc, storageManager, treeIndexRegistryProvider,
+				fileSplitProvider, interiorFrameFactory, leafFrameFactory,
+				typeTraits, comparatorFactories, valueProviderFactories,
+				opHelperFactory);
+		this.fieldPermutation = fieldPermutation;
+		this.op = op;
+	}
 
-    @Override
-    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx, IOperatorEnvironment env,
-            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
-        return new TreeIndexInsertUpdateDeleteOperatorNodePushable(this, ctx, partition, fieldPermutation,
-                recordDescProvider, op);
-    }
-}
+	@Override
+	public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+			IOperatorEnvironment env,
+			IRecordDescriptorProvider recordDescProvider, int partition,
+			int nPartitions) {
+		return new TreeIndexInsertUpdateDeleteOperatorNodePushable(this, ctx,
+				partition, fieldPermutation, recordDescProvider, op);
+	}
+}
\ No newline at end of file
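Editor's note: with the new parameter, callers of this descriptor now pass the value provider factories between the comparator factories and the field permutation. A hedged fragment showing the adjusted call site is below; all other inputs are assumed to be prepared by the surrounding job, and a B-tree pipeline would presumably pass null because only the r-tree needs value providers:

// Fragment only: wiring the insert operator with the new
// valueProviderFactories argument (null for a B-tree pipeline).
TreeIndexInsertUpdateDeleteOperatorDescriptor insertOp =
        new TreeIndexInsertUpdateDeleteOperatorDescriptor(spec, recDesc,
                storageManager, treeIndexRegistryProvider, fileSplitProvider,
                interiorFrameFactory, leafFrameFactory, typeTraits,
                comparatorFactories,
                null /* valueProviderFactories */,
                fieldPermutation, IndexOp.INSERT, opHelperFactory);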
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorNodePushable.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorNodePushable.java
index 5779805..cc49998 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorNodePushable.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorNodePushable.java
@@ -28,95 +28,104 @@
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOpContext;
 
-public class TreeIndexInsertUpdateDeleteOperatorNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
-    private final TreeIndexOpHelper treeIndexOpHelper;
-    private FrameTupleAccessor accessor;
-    private final IRecordDescriptorProvider recordDescProvider;
-    private final IndexOp op;
-    private final PermutingFrameTupleReference tuple = new PermutingFrameTupleReference();
-    private ByteBuffer writeBuffer;
-    private IndexOpContext opCtx;
+public class TreeIndexInsertUpdateDeleteOperatorNodePushable extends
+		AbstractUnaryInputUnaryOutputOperatorNodePushable {
+	private final TreeIndexOpHelper treeIndexOpHelper;
+	private FrameTupleAccessor accessor;
+	private final IRecordDescriptorProvider recordDescProvider;
+	private final IndexOp op;
+	private final PermutingFrameTupleReference tuple = new PermutingFrameTupleReference();
+	private ByteBuffer writeBuffer;
+	private IndexOpContext opCtx;
 
-    public TreeIndexInsertUpdateDeleteOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
-            IHyracksTaskContext ctx, int partition, int[] fieldPermutation,
-            IRecordDescriptorProvider recordDescProvider, IndexOp op) {
-        treeIndexOpHelper = opDesc.getTreeIndexOpHelperFactory().createTreeIndexOpHelper(opDesc, ctx, partition,
-                IndexHelperOpenMode.OPEN);
-        this.recordDescProvider = recordDescProvider;
-        this.op = op;
-        tuple.setFieldPermutation(fieldPermutation);
-    }
+	public TreeIndexInsertUpdateDeleteOperatorNodePushable(
+			AbstractTreeIndexOperatorDescriptor opDesc,
+			IHyracksTaskContext ctx, int partition, int[] fieldPermutation,
+			IRecordDescriptorProvider recordDescProvider, IndexOp op) {
+		treeIndexOpHelper = opDesc.getTreeIndexOpHelperFactory()
+				.createTreeIndexOpHelper(opDesc, ctx, partition,
+						IndexHelperOpenMode.OPEN);
+		this.recordDescProvider = recordDescProvider;
+		this.op = op;
+		tuple.setFieldPermutation(fieldPermutation);
+	}
 
-    @Override
-    public void open() throws HyracksDataException {
-        AbstractTreeIndexOperatorDescriptor opDesc = (AbstractTreeIndexOperatorDescriptor) treeIndexOpHelper
-                .getOperatorDescriptor();
-        RecordDescriptor inputRecDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getOperatorId(), 0);
-        accessor = new FrameTupleAccessor(treeIndexOpHelper.getHyracksTaskContext().getFrameSize(), inputRecDesc);
-        writeBuffer = treeIndexOpHelper.getHyracksTaskContext().allocateFrame();
-        writer.open();
-        try {
-            treeIndexOpHelper.init();
-            treeIndexOpHelper.getTreeIndex().open(treeIndexOpHelper.getIndexFileId());
-            opCtx = treeIndexOpHelper.getTreeIndex().createOpContext(op, treeIndexOpHelper.getLeafFrame(),
-                    treeIndexOpHelper.getInteriorFrame(), new LIFOMetaDataFrame());
-        } catch (Exception e) {
-            // cleanup in case of failure
-            treeIndexOpHelper.deinit();
-            throw new HyracksDataException(e);
-        }
-    }
+	@Override
+	public void open() throws HyracksDataException {
+		AbstractTreeIndexOperatorDescriptor opDesc = (AbstractTreeIndexOperatorDescriptor) treeIndexOpHelper
+				.getOperatorDescriptor();
+		RecordDescriptor inputRecDesc = recordDescProvider
+				.getInputRecordDescriptor(opDesc.getOperatorId(), 0);
+		accessor = new FrameTupleAccessor(treeIndexOpHelper
+				.getHyracksTaskContext().getFrameSize(), inputRecDesc);
+		writeBuffer = treeIndexOpHelper.getHyracksTaskContext().allocateFrame();
+		writer.open();
+		try {
+			treeIndexOpHelper.init();
+			treeIndexOpHelper.getTreeIndex().open(
+					treeIndexOpHelper.getIndexFileId());
+			opCtx = treeIndexOpHelper.getTreeIndex().createOpContext(op,
+					treeIndexOpHelper.getLeafFrame(),
+					treeIndexOpHelper.getInteriorFrame(),
+					new LIFOMetaDataFrame());
+		} catch (Exception e) {
+			// cleanup in case of failure
+			treeIndexOpHelper.deinit();
+			throw new HyracksDataException(e);
+		}
+	}
 
-    @Override
-    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
-        final ITreeIndex treeIndex = treeIndexOpHelper.getTreeIndex();
-        accessor.reset(buffer);
+	@Override
+	public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+		final ITreeIndex treeIndex = treeIndexOpHelper.getTreeIndex();
+		accessor.reset(buffer);
 
-        int tupleCount = accessor.getTupleCount();
-        for (int i = 0; i < tupleCount; i++) {
-            tuple.reset(accessor, i);
-            try {
-                switch (op) {
+		int tupleCount = accessor.getTupleCount();
+		for (int i = 0; i < tupleCount; i++) {
+			tuple.reset(accessor, i);
+			try {
+				switch (op) {
 
-                    case INSERT: {
-                        treeIndex.insert(tuple, opCtx);
-                    }
-                        break;
+				case INSERT: {
+					treeIndex.insert(tuple, opCtx);
+				}
+					break;
 
-                    case DELETE: {
-                        treeIndex.delete(tuple, opCtx);
-                    }
-                        break;
+				case DELETE: {
+					treeIndex.delete(tuple, opCtx);
+				}
+					break;
 
-                    default: {
-                        throw new HyracksDataException("Unsupported operation " + op
-                                + " in tree index InsertUpdateDelete operator");
-                    }
+				default: {
+					throw new HyracksDataException("Unsupported operation "
+							+ op + " in tree index InsertUpdateDelete operator");
+				}
 
-                }
+				}
 
-            } catch (Exception e) {
-                e.printStackTrace();
-                throw new HyracksDataException(e);
-            }
-        }
+			} catch (Exception e) {
+				e.printStackTrace();
+				throw new HyracksDataException(e);
+			}
+		}
 
-        // pass a copy of the frame to next op
-        System.arraycopy(buffer.array(), 0, writeBuffer.array(), 0, buffer.capacity());
-        FrameUtils.flushFrame(writeBuffer, writer);
-    }
+		// pass a copy of the frame to next op
+		System.arraycopy(buffer.array(), 0, writeBuffer.array(), 0,
+				buffer.capacity());
+		FrameUtils.flushFrame(writeBuffer, writer);
+	}
 
-    @Override
-    public void close() throws HyracksDataException {
-        try {
-            writer.close();
-        } finally {
-            treeIndexOpHelper.deinit();
-        }
-    }
+	@Override
+	public void close() throws HyracksDataException {
+		try {
+			writer.close();
+		} finally {
+			treeIndexOpHelper.deinit();
+		}
+	}
 
-    @Override
-    public void fail() throws HyracksDataException {
-        writer.fail();
-    }
+	@Override
+	public void fail() throws HyracksDataException {
+		writer.fail();
+	}
 }
\ No newline at end of file
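Editor's note: the pushable dispatches each tuple on the IndexOp value, rejects anything it does not handle, and then forwards a copy of the whole input frame downstream unchanged. A standalone sketch of that enum dispatch with an explicit rejection of unsupported operations (plain Java enum; names illustrative):

// Standalone sketch of per-tuple dispatch on an operation enum, rejecting
// unsupported values as the operator above does.
public class OpDispatchSketch {
    enum Op { INSERT, DELETE, UPDATE }

    static void apply(Op op, String tuple) {
        switch (op) {
            case INSERT:
                System.out.println("insert " + tuple);
                break;
            case DELETE:
                System.out.println("delete " + tuple);
                break;
            default:
                // mirrors the HyracksDataException thrown for unsupported ops
                throw new IllegalStateException("Unsupported operation " + op);
        }
    }

    public static void main(String[] args) {
        apply(Op.INSERT, "t1");
        apply(Op.DELETE, "t2");
    }
}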
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexOpHelper.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexOpHelper.java
index af98bb3..f9fd77d 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexOpHelper.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexOpHelper.java
@@ -30,147 +30,170 @@
 
 public abstract class TreeIndexOpHelper {
 
-    protected ITreeIndexFrame interiorFrame;
-    protected ITreeIndexFrame leafFrame;
-    protected MultiComparator cmp;
+	protected ITreeIndexFrame interiorFrame;
+	protected ITreeIndexFrame leafFrame;
+	protected MultiComparator cmp;
 
-    protected ITreeIndex treeIndex;
-    protected int indexFileId = -1;
-    protected int partition;
+	protected ITreeIndex treeIndex;
+	protected int indexFileId = -1;
+	protected int partition;
 
-    protected ITreeIndexOperatorDescriptorHelper opDesc;
-    protected IHyracksTaskContext ctx;
+	protected ITreeIndexOperatorDescriptorHelper opDesc;
+	protected IHyracksTaskContext ctx;
 
-    protected IndexHelperOpenMode mode;
+	protected IndexHelperOpenMode mode;
 
-    public TreeIndexOpHelper(ITreeIndexOperatorDescriptorHelper opDesc, final IHyracksTaskContext ctx, int partition,
-            IndexHelperOpenMode mode) {
-        this.opDesc = opDesc;
-        this.ctx = ctx;
-        this.mode = mode;
-        this.partition = partition;
-    }
+	public TreeIndexOpHelper(ITreeIndexOperatorDescriptorHelper opDesc,
+			final IHyracksTaskContext ctx, int partition,
+			IndexHelperOpenMode mode) {
+		this.opDesc = opDesc;
+		this.ctx = ctx;
+		this.mode = mode;
+		this.partition = partition;
+	}
 
-    public void init() throws HyracksDataException {
-        IBufferCache bufferCache = opDesc.getStorageManager().getBufferCache(ctx);
-        IFileMapProvider fileMapProvider = opDesc.getStorageManager().getFileMapProvider(ctx);
-        IFileSplitProvider fileSplitProvider = opDesc.getTreeIndexFileSplitProvider();
+	public void init() throws HyracksDataException {
+		IBufferCache bufferCache = opDesc.getStorageManager().getBufferCache(
+				ctx);
+		IFileMapProvider fileMapProvider = opDesc.getStorageManager()
+				.getFileMapProvider(ctx);
+		IFileSplitProvider fileSplitProvider = opDesc
+				.getTreeIndexFileSplitProvider();
 
-        FileReference f = fileSplitProvider.getFileSplits()[partition].getLocalFile();
-        boolean fileIsMapped = fileMapProvider.isMapped(f);
+		FileReference f = fileSplitProvider.getFileSplits()[partition]
+				.getLocalFile();
+		boolean fileIsMapped = fileMapProvider.isMapped(f);
 
-        switch (mode) {
+		switch (mode) {
 
-            case OPEN: {
-                if (!fileIsMapped) {
-                    throw new HyracksDataException("Trying to open tree index from unmapped file " + f.toString());
-                }
-            }
-                break;
+		case OPEN: {
+			if (!fileIsMapped) {
+				throw new HyracksDataException(
+						"Trying to open tree index from unmapped file "
+								+ f.toString());
+			}
+		}
+			break;
 
-            case CREATE:
-            case ENLIST: {
-                if (!fileIsMapped) {
-                    bufferCache.createFile(f);
-                }
-            }
-                break;
+		case CREATE:
+		case ENLIST: {
+			if (!fileIsMapped) {
+				bufferCache.createFile(f);
+			}
+		}
+			break;
 
-        }
+		}
 
-        int fileId = fileMapProvider.lookupFileId(f);
-        try {
-            bufferCache.openFile(fileId);
-        } catch (HyracksDataException e) {
-            // revert state of buffer cache since file failed to open
-            if (!fileIsMapped) {
-                bufferCache.deleteFile(fileId);
-            }
-            throw e;
-        }
+		int fileId = fileMapProvider.lookupFileId(f);
+		try {
+			bufferCache.openFile(fileId);
+		} catch (HyracksDataException e) {
+			// revert state of buffer cache since file failed to open
+			if (!fileIsMapped) {
+				bufferCache.deleteFile(fileId);
+			}
+			throw e;
+		}
 
-        // only set indexFileId member when openFile() succeeds,
-        // otherwise deinit() will try to close the file that failed to open
-        indexFileId = fileId;
+		// only set indexFileId member when openFile() succeeds,
+		// otherwise deinit() will try to close the file that failed to open
+		indexFileId = fileId;
 
-        interiorFrame = opDesc.getTreeIndexInteriorFactory().createFrame();
-        leafFrame = opDesc.getTreeIndexLeafFactory().createFrame();
+		interiorFrame = opDesc.getTreeIndexInteriorFactory().createFrame();
+		leafFrame = opDesc.getTreeIndexLeafFactory().createFrame();
 
-        IndexRegistry<ITreeIndex> treeIndexRegistry = opDesc.getTreeIndexRegistryProvider().getRegistry(ctx);
-        treeIndex = treeIndexRegistry.get(indexFileId);
-        if (treeIndex == null) {
+		IndexRegistry<ITreeIndex> treeIndexRegistry = opDesc
+				.getTreeIndexRegistryProvider().getRegistry(ctx);
+		treeIndex = treeIndexRegistry.get(indexFileId);
+		if (treeIndex == null) {
 
-            // create new tree and register it
-            treeIndexRegistry.lock();
-            try {
-                // check if tree has already been registered by another thread
-                treeIndex = treeIndexRegistry.get(indexFileId);
-                if (treeIndex == null) {
-                    // this thread should create and register the tree
+			// create new tree and register it
+			treeIndexRegistry.lock();
+			try {
+				// check if tree has already been registered by another thread
+				treeIndex = treeIndexRegistry.get(indexFileId);
+				if (treeIndex == null) {
+					// this thread should create and register the tree
 
-                    IBinaryComparator[] comparators = new IBinaryComparator[opDesc.getTreeIndexComparatorFactories().length];
-                    for (int i = 0; i < opDesc.getTreeIndexComparatorFactories().length; i++) {
-                        comparators[i] = opDesc.getTreeIndexComparatorFactories()[i].createBinaryComparator();
-                    }
+					IBinaryComparator[] comparators = new IBinaryComparator[opDesc
+							.getTreeIndexComparatorFactories().length];
+					for (int i = 0; i < opDesc
+							.getTreeIndexComparatorFactories().length; i++) {
+						comparators[i] = opDesc
+								.getTreeIndexComparatorFactories()[i]
+								.createBinaryComparator();
+					}
 
-                    cmp = new MultiComparator(opDesc.getTreeIndexTypeTraits(), comparators);
+					cmp = createMultiComparator(comparators);
 
-                    treeIndex = createTreeIndex();
-                    if (mode == IndexHelperOpenMode.CREATE) {
-                        ITreeIndexMetaDataFrame metaFrame = treeIndex.getFreePageManager().getMetaDataFrameFactory()
-                                .createFrame();
-                        try {
-                            treeIndex.create(indexFileId, leafFrame, metaFrame);
-                        } catch (Exception e) {
-                            throw new HyracksDataException(e);
-                        }
-                    }
-                    treeIndex.open(indexFileId);
-                    treeIndexRegistry.register(indexFileId, treeIndex);
-                }
-            } finally {
-                treeIndexRegistry.unlock();
-            }
-        }
-    }
+					treeIndex = createTreeIndex();
+					if (mode == IndexHelperOpenMode.CREATE) {
+						ITreeIndexMetaDataFrame metaFrame = treeIndex
+								.getFreePageManager().getMetaDataFrameFactory()
+								.createFrame();
+						try {
+							treeIndex.create(indexFileId, leafFrame, metaFrame);
+						} catch (Exception e) {
+							throw new HyracksDataException(e);
+						}
+					}
+					treeIndex.open(indexFileId);
+					treeIndexRegistry.register(indexFileId, treeIndex);
+				}
+			} finally {
+				treeIndexRegistry.unlock();
+			}
+		}
+	}
 
-    // MUST be overridden
-    public ITreeIndex createTreeIndex() throws HyracksDataException {
-        throw new HyracksDataException("createTreeIndex Operation not implemented.");
-    }
+	// MUST be overridden
+	public ITreeIndex createTreeIndex() throws HyracksDataException {
+		throw new HyracksDataException(
+				"createTreeIndex Operation not implemented.");
+	}
 
-    public ITreeIndexCursor createDiskOrderScanCursor(ITreeIndexFrame leafFrame) throws HyracksDataException {
-        return new TreeDiskOrderScanCursor(leafFrame);
-    }
+	// MUST be overridden
+	public MultiComparator createMultiComparator(IBinaryComparator[] comparators)
+			throws HyracksDataException {
+		throw new HyracksDataException(
+				"createMultiComparator Operation not implemented.");
+	}
 
-    public void deinit() throws HyracksDataException {
-        if (indexFileId != -1) {
-            IBufferCache bufferCache = opDesc.getStorageManager().getBufferCache(ctx);
-            bufferCache.closeFile(indexFileId);
-        }
-    }
+	public ITreeIndexCursor createDiskOrderScanCursor(ITreeIndexFrame leafFrame)
+			throws HyracksDataException {
+		return new TreeDiskOrderScanCursor(leafFrame);
+	}
 
-    public ITreeIndex getTreeIndex() {
-        return treeIndex;
-    }
+	public void deinit() throws HyracksDataException {
+		if (indexFileId != -1) {
+			IBufferCache bufferCache = opDesc.getStorageManager()
+					.getBufferCache(ctx);
+			bufferCache.closeFile(indexFileId);
+		}
+	}
 
-    public IHyracksTaskContext getHyracksTaskContext() {
-        return ctx;
-    }
+	public ITreeIndex getTreeIndex() {
+		return treeIndex;
+	}
 
-    public ITreeIndexOperatorDescriptorHelper getOperatorDescriptor() {
-        return opDesc;
-    }
+	public IHyracksTaskContext getHyracksTaskContext() {
+		return ctx;
+	}
 
-    public ITreeIndexFrame getLeafFrame() {
-        return leafFrame;
-    }
+	public ITreeIndexOperatorDescriptorHelper getOperatorDescriptor() {
+		return opDesc;
+	}
 
-    public ITreeIndexFrame getInteriorFrame() {
-        return interiorFrame;
-    }
+	public ITreeIndexFrame getLeafFrame() {
+		return leafFrame;
+	}
 
-    public int getIndexFileId() {
-        return indexFileId;
-    }
+	public ITreeIndexFrame getInteriorFrame() {
+		return interiorFrame;
+	}
+
+	public int getIndexFileId() {
+		return indexFileId;
+	}
 }
\ No newline at end of file
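Editor's note: the helper now delegates comparator construction to the new createMultiComparator hook (alongside createTreeIndex), so a tree-specific helper can build a MultiComparator that knows about its value providers. A hedged sketch of what a concrete override might look like is below; the subclass name is hypothetical, imports are omitted, and the MultiComparator constructor shown is the pre-existing one from the removed code, which an r-tree helper would replace with a provider-aware variant:

// Hypothetical subclass sketch: a tree-specific op helper overriding the two
// "MUST be overridden" hooks. Class name and constructor choice are
// assumptions for illustration, not code from this patch.
public class ExampleTreeIndexOpHelper extends TreeIndexOpHelper {

    public ExampleTreeIndexOpHelper(ITreeIndexOperatorDescriptorHelper opDesc,
            IHyracksTaskContext ctx, int partition, IndexHelperOpenMode mode) {
        super(opDesc, ctx, partition, mode);
    }

    @Override
    public MultiComparator createMultiComparator(IBinaryComparator[] comparators)
            throws HyracksDataException {
        // a B-tree-style helper needs only type traits and comparators; an
        // r-tree helper would additionally wire in its value providers here
        return new MultiComparator(opDesc.getTreeIndexTypeTraits(), comparators);
    }

    @Override
    public ITreeIndex createTreeIndex() throws HyracksDataException {
        // construction of the concrete tree (B-tree, r-tree, ...) from the
        // frames and buffer cache prepared by init() is omitted in this sketch
        throw new HyracksDataException("tree construction omitted in this sketch");
    }
}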
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorDescriptor.java
index d221e25..d1a1614 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorDescriptor.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorDescriptor.java
@@ -12,22 +12,29 @@
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
 
-public class TreeIndexStatsOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
+public class TreeIndexStatsOperatorDescriptor extends
+		AbstractTreeIndexOperatorDescriptor {
 
-    private static final long serialVersionUID = 1L;
+	private static final long serialVersionUID = 1L;
 
-    public TreeIndexStatsOperatorDescriptor(JobSpecification spec, IStorageManagerInterface storageManager,
-            IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider, IFileSplitProvider fileSplitProvider,
-            ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory leafFrameFactory,
-            ITypeTrait[] typeTraits, IBinaryComparatorFactory[] comparatorFactories,
-            ITreeIndexOpHelperFactory opHelperFactory) {
-        super(spec, 0, 0, null, storageManager, treeIndexRegistryProvider, fileSplitProvider, interiorFrameFactory,
-                leafFrameFactory, typeTraits, comparatorFactories, opHelperFactory);
-    }
+	public TreeIndexStatsOperatorDescriptor(JobSpecification spec,
+			IStorageManagerInterface storageManager,
+			IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider,
+			IFileSplitProvider fileSplitProvider,
+			ITreeIndexFrameFactory interiorFrameFactory,
+			ITreeIndexFrameFactory leafFrameFactory, ITypeTrait[] typeTraits,
+			IBinaryComparatorFactory[] comparatorFactories,
+			ITreeIndexOpHelperFactory opHelperFactory) {
+		super(spec, 0, 0, null, storageManager, treeIndexRegistryProvider,
+				fileSplitProvider, interiorFrameFactory, leafFrameFactory,
+				typeTraits, comparatorFactories, null, opHelperFactory);
+	}
 
-    @Override
-    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx, IOperatorEnvironment env,
-            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
-        return new TreeIndexStatsOperatorNodePushable(this, ctx, partition);
-    }
-}
+	@Override
+	public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
+			IOperatorEnvironment env,
+			IRecordDescriptorProvider recordDescProvider, int partition,
+			int nPartitions) {
+		return new TreeIndexStatsOperatorNodePushable(this, ctx, partition);
+	}
+}
\ No newline at end of file
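
A hedged sketch of wiring the stats operator into a job. The spec, storageManager, treeIndexRegistryProvider, fileSplitProvider, frame factories, typeTraits, comparatorFactories, and opHelperFactory names are assumed to be the same objects used when the index was built; only the constructor shown in the hunk above is exercised, and the new value-provider slot is forwarded as null internally, so callers are unchanged.

    TreeIndexStatsOperatorDescriptor statsOp = new TreeIndexStatsOperatorDescriptor(
            spec, storageManager, treeIndexRegistryProvider, fileSplitProvider,
            interiorFrameFactory, leafFrameFactory, typeTraits,
            comparatorFactories, opHelperFactory);
    // The operator declares no inputs or outputs (super(spec, 0, 0, ...)), so it is
    // not connected to other operators; the stats are reported by its node pushable.
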
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorNodePushable.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorNodePushable.java
index 8c825c2..4b53453 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorNodePushable.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorNodePushable.java
@@ -24,55 +24,65 @@
 import edu.uci.ics.hyracks.storage.am.common.utility.TreeIndexStatsGatherer;
 import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
 
-public class TreeIndexStatsOperatorNodePushable extends AbstractOperatorNodePushable {
-    private final TreeIndexOpHelper treeIndexOpHelper;
-    private final IHyracksTaskContext ctx;
-    private TreeIndexStatsGatherer statsGatherer;
+public class TreeIndexStatsOperatorNodePushable extends
+		AbstractOperatorNodePushable {
+	private final TreeIndexOpHelper treeIndexOpHelper;
+	private final IHyracksTaskContext ctx;
+	private TreeIndexStatsGatherer statsGatherer;
 
-    public TreeIndexStatsOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
-            int partition) {
-        treeIndexOpHelper = opDesc.getTreeIndexOpHelperFactory().createTreeIndexOpHelper(opDesc, ctx, partition,
-                IndexHelperOpenMode.CREATE);
-        this.ctx = ctx;
-    }
+	public TreeIndexStatsOperatorNodePushable(
+			AbstractTreeIndexOperatorDescriptor opDesc,
+			IHyracksTaskContext ctx, int partition) {
+		treeIndexOpHelper = opDesc.getTreeIndexOpHelperFactory()
+				.createTreeIndexOpHelper(opDesc, ctx, partition,
+						IndexHelperOpenMode.CREATE);
+		this.ctx = ctx;
+	}
 
-    @Override
-    public void deinitialize() throws HyracksDataException {
-    }
+	@Override
+	public void deinitialize() throws HyracksDataException {
+	}
 
-    @Override
-    public int getInputArity() {
-        return 0;
-    }
+	@Override
+	public int getInputArity() {
+		return 0;
+	}
 
-    @Override
-    public IFrameWriter getInputFrameWriter(int index) {
-        return null;
-    }
+	@Override
+	public IFrameWriter getInputFrameWriter(int index) {
+		return null;
+	}
 
-    @Override
-    public void initialize() throws HyracksDataException {
-        try {
-            treeIndexOpHelper.init();
-            treeIndexOpHelper.getTreeIndex().open(treeIndexOpHelper.getIndexFileId());
+	@Override
+	public void initialize() throws HyracksDataException {
+		try {
+			treeIndexOpHelper.init();
+			treeIndexOpHelper.getTreeIndex().open(
+					treeIndexOpHelper.getIndexFileId());
 
-            ITreeIndex treeIndex = treeIndexOpHelper.getTreeIndex();
-            IBufferCache bufferCache = treeIndexOpHelper.getOperatorDescriptor().getStorageManager()
-                    .getBufferCache(ctx);
+			ITreeIndex treeIndex = treeIndexOpHelper.getTreeIndex();
+			IBufferCache bufferCache = treeIndexOpHelper
+					.getOperatorDescriptor().getStorageManager()
+					.getBufferCache(ctx);
 
-            statsGatherer = new TreeIndexStatsGatherer(bufferCache, treeIndex.getFreePageManager(),
-                    treeIndexOpHelper.getIndexFileId(), treeIndex.getRootPageId());
-            TreeIndexStats stats = statsGatherer.gatherStats(treeIndex.getLeafFrameFactory().createFrame(), treeIndex
-                    .getInteriorFrameFactory().createFrame(), treeIndex.getFreePageManager().getMetaDataFrameFactory()
-                    .createFrame());
-            System.err.println(stats.toString());
-        } catch (Exception e) {
-            treeIndexOpHelper.deinit();
-            throw new HyracksDataException(e);
-        }
-    }
+			statsGatherer = new TreeIndexStatsGatherer(bufferCache,
+					treeIndex.getFreePageManager(),
+					treeIndexOpHelper.getIndexFileId(),
+					treeIndex.getRootPageId());
+			TreeIndexStats stats = statsGatherer.gatherStats(treeIndex
+					.getLeafFrameFactory().createFrame(), treeIndex
+					.getInteriorFrameFactory().createFrame(), treeIndex
+					.getFreePageManager().getMetaDataFrameFactory()
+					.createFrame());
+			System.err.println(stats.toString());
+		} catch (Exception e) {
+			treeIndexOpHelper.deinit();
+			throw new HyracksDataException(e);
+		}
+	}
 
-    @Override
-    public void setOutputFrameWriter(int index, IFrameWriter writer, RecordDescriptor recordDesc) {
-    }
+	@Override
+	public void setOutputFrameWriter(int index, IFrameWriter writer,
+			RecordDescriptor recordDesc) {
+	}
 }
\ No newline at end of file
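
The pushable above prints the gathered statistics to System.err. The gatherer can also be driven directly against an already-open index; a hedged fragment, assuming treeIndex, bufferCache, and indexFileId are already open and in scope:

    TreeIndexStatsGatherer statsGatherer = new TreeIndexStatsGatherer(bufferCache,
            treeIndex.getFreePageManager(), indexFileId, treeIndex.getRootPageId());
    TreeIndexStats stats = statsGatherer.gatherStats(
            treeIndex.getLeafFrameFactory().createFrame(),
            treeIndex.getInteriorFrameFactory().createFrame(),
            treeIndex.getFreePageManager().getMetaDataFrameFactory().createFrame());
    System.out.println(stats); // or log it, instead of the pushable's System.err
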
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/AbstractSlotManager.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/AbstractSlotManager.java
index 87fea47..bce2e19 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/AbstractSlotManager.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/AbstractSlotManager.java
@@ -20,41 +20,42 @@
 
 public abstract class AbstractSlotManager implements ISlotManager {
 
-    protected static final int slotSize = 4;
-    protected ITreeIndexFrame frame;
+	protected static final int slotSize = 4;
+	protected ITreeIndexFrame frame;
 
-    @Override
-    public int getTupleOff(int offset) {
-        return frame.getBuffer().getInt(offset);
-    }
+	@Override
+	public int getTupleOff(int offset) {
+		return frame.getBuffer().getInt(offset);
+	}
 
-    @Override
-    public void setSlot(int offset, int value) {
-        frame.getBuffer().putInt(offset, value);
-    }
+	@Override
+	public void setSlot(int offset, int value) {
+		frame.getBuffer().putInt(offset, value);
+	}
 
-    @Override
-    public int getSlotEndOff() {
-        return frame.getBuffer().capacity() - (frame.getTupleCount() * slotSize);
-    }
+	@Override
+	public int getSlotEndOff() {
+		return frame.getBuffer().capacity()
+				- (frame.getTupleCount() * slotSize);
+	}
 
-    @Override
-    public int getSlotStartOff() {
-        return frame.getBuffer().capacity() - slotSize;
-    }
+	@Override
+	public int getSlotStartOff() {
+		return frame.getBuffer().capacity() - slotSize;
+	}
 
-    @Override
-    public int getSlotSize() {
-        return slotSize;
-    }
+	@Override
+	public int getSlotSize() {
+		return slotSize;
+	}
 
-    @Override
-    public void setFrame(ITreeIndexFrame frame) {
-        this.frame = frame;
-    }
+	@Override
+	public void setFrame(ITreeIndexFrame frame) {
+		this.frame = frame;
+	}
 
-    @Override
-    public int getSlotOff(int tupleIndex) {
-        return getSlotStartOff() - tupleIndex * slotSize;
-    }
+	@Override
+	public int getSlotOff(int tupleIndex) {
+		return getSlotStartOff() - tupleIndex * slotSize;
+	}
 }
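
The slot manager lays its 4-byte slots out backwards from the end of the page buffer: slot 0 sits in the last 4 bytes, later slots grow toward the front. A self-contained illustration of that arithmetic (the 256-byte capacity and tuple count are made up for the example):

    import java.nio.ByteBuffer;

    public class SlotLayoutDemo {
        private static final int SLOT_SIZE = 4;

        public static void main(String[] args) {
            ByteBuffer buf = ByteBuffer.allocate(256); // stand-in for a page buffer
            int tupleCount = 3;

            // Mirrors AbstractSlotManager: slot 0 is at the very end of the buffer.
            int slotStartOff = buf.capacity() - SLOT_SIZE;            // 252
            int slotEndOff = buf.capacity() - tupleCount * SLOT_SIZE; // 244

            for (int i = 0; i < tupleCount; i++) {
                int slotOff = slotStartOff - i * SLOT_SIZE;           // 252, 248, 244
                System.out.println("slot " + i + " -> offset " + slotOff);
            }
            System.out.println("slot region: [" + slotEndOff + ", " + buf.capacity() + ")");
        }
    }
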
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/FrameOpSpaceStatus.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/FrameOpSpaceStatus.java
index 97a4730..7114875 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/FrameOpSpaceStatus.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/FrameOpSpaceStatus.java
@@ -16,5 +16,5 @@
 package edu.uci.ics.hyracks.storage.am.common.frames;
 
 public enum FrameOpSpaceStatus {
-    INSUFFICIENT_SPACE, SUFFICIENT_CONTIGUOUS_SPACE, SUFFICIENT_SPACE
+	INSUFFICIENT_SPACE, SUFFICIENT_CONTIGUOUS_SPACE, SUFFICIENT_SPACE
 }
\ No newline at end of file
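
A hedged sketch of how a caller might branch on this status when inserting into an NSM frame: insert directly when contiguous space exists, compact first when only fragmented space remains, and otherwise fall back to the index's overflow handling. The frame, tuple, and cmp variables are assumed to be in scope, the fragment sits in a method that may throw Exception, and splitPage is a hypothetical placeholder for the split logic.

    FrameOpSpaceStatus status = frame.hasSpaceInsert(tuple, cmp);
    switch (status) {
        case SUFFICIENT_CONTIGUOUS_SPACE:
            frame.insert(tuple, cmp, frame.findTupleIndex(tuple, cmp));
            break;
        case SUFFICIENT_SPACE:
            frame.compact(cmp); // reclaim space left by deleted tuples first
            frame.insert(tuple, cmp, frame.findTupleIndex(tuple, cmp));
            break;
        case INSUFFICIENT_SPACE:
            splitPage(frame, tuple, cmp); // hypothetical: defer to the split path
            break;
    }
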
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/LIFOMetaDataFrame.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/LIFOMetaDataFrame.java
index b621b89..b0ec13d 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/LIFOMetaDataFrame.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/LIFOMetaDataFrame.java
@@ -27,89 +27,89 @@
 
 public class LIFOMetaDataFrame implements ITreeIndexMetaDataFrame {
 
-    protected static final int tupleCountOff = 0;
-    protected static final int freeSpaceOff = tupleCountOff + 4;
-    protected static final int maxPageOff = freeSpaceOff + 4;
-    protected static final int dummyFieldOff = maxPageOff + 4;
-    protected static final byte levelOff = dummyFieldOff + 4;
-    protected static final byte nextPageOff = levelOff + 1;
+	protected static final int tupleCountOff = 0;
+	protected static final int freeSpaceOff = tupleCountOff + 4;
+	protected static final int maxPageOff = freeSpaceOff + 4;
+	protected static final int dummyFieldOff = maxPageOff + 4;
+	protected static final byte levelOff = dummyFieldOff + 4;
+	protected static final byte nextPageOff = levelOff + 1;
 
-    protected ICachedPage page = null;
-    protected ByteBuffer buf = null;
+	protected ICachedPage page = null;
+	protected ByteBuffer buf = null;
 
-    public int getMaxPage() {
-        return buf.getInt(maxPageOff);
-    }
+	public int getMaxPage() {
+		return buf.getInt(maxPageOff);
+	}
 
-    public void setMaxPage(int maxPage) {
-        buf.putInt(maxPageOff, maxPage);
-    }
+	public void setMaxPage(int maxPage) {
+		buf.putInt(maxPageOff, maxPage);
+	}
 
-    public int getFreePage() {
-        int tupleCount = buf.getInt(tupleCountOff);
-        if (tupleCount > 0) {
-            // return the last page from the linked list of free pages
-            // TODO: this is a dumb policy, but good enough for now
-            int lastPageOff = buf.getInt(freeSpaceOff) - 4;
-            buf.putInt(freeSpaceOff, lastPageOff);
-            buf.putInt(tupleCountOff, tupleCount - 1);
-            return buf.getInt(lastPageOff);
-        } else {
-            return -1;
-        }
-    }
+	public int getFreePage() {
+		int tupleCount = buf.getInt(tupleCountOff);
+		if (tupleCount > 0) {
+			// return the last page from the linked list of free pages
+			// TODO: this is a dumb policy, but good enough for now
+			int lastPageOff = buf.getInt(freeSpaceOff) - 4;
+			buf.putInt(freeSpaceOff, lastPageOff);
+			buf.putInt(tupleCountOff, tupleCount - 1);
+			return buf.getInt(lastPageOff);
+		} else {
+			return -1;
+		}
+	}
 
-    // must be checked before adding free page
-    // user of this class is responsible for getting a free page as a new meta
-    // page, latching it, etc. if there is no space on this page
-    public boolean hasSpace() {
-        return buf.getInt(freeSpaceOff) + 4 < buf.capacity();
-    }
+	// must be checked before adding free page
+	// user of this class is responsible for getting a free page as a new meta
+	// page, latching it, etc. if there is no space on this page
+	public boolean hasSpace() {
+		return buf.getInt(freeSpaceOff) + 4 < buf.capacity();
+	}
 
-    // on bounds checking is done, there must be free space
-    public void addFreePage(int freePage) {
-        int freeSpace = buf.getInt(freeSpaceOff);
-        buf.putInt(freeSpace, freePage);
-        buf.putInt(freeSpaceOff, freeSpace + 4);
-        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
-    }
+	// no bounds checking is done, there must be free space
+	public void addFreePage(int freePage) {
+		int freeSpace = buf.getInt(freeSpaceOff);
+		buf.putInt(freeSpace, freePage);
+		buf.putInt(freeSpaceOff, freeSpace + 4);
+		buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
+	}
 
-    @Override
-    public byte getLevel() {
-        return buf.get(levelOff);
-    }
+	@Override
+	public byte getLevel() {
+		return buf.get(levelOff);
+	}
 
-    @Override
-    public void setLevel(byte level) {
-        buf.put(levelOff, level);
-    }
+	@Override
+	public void setLevel(byte level) {
+		buf.put(levelOff, level);
+	}
 
-    @Override
-    public ICachedPage getPage() {
-        return page;
-    }
+	@Override
+	public ICachedPage getPage() {
+		return page;
+	}
 
-    @Override
-    public void setPage(ICachedPage page) {
-        this.page = page;
-        this.buf = page.getBuffer();
-    }
+	@Override
+	public void setPage(ICachedPage page) {
+		this.page = page;
+		this.buf = page.getBuffer();
+	}
 
-    @Override
-    public void initBuffer(int level) {
-        buf.putInt(freeSpaceOff, nextPageOff + 4);
-        buf.putInt(tupleCountOff, 0);
-        buf.putInt(levelOff, level);
-        buf.putInt(nextPageOff, -1);
-    }
+	@Override
+	public void initBuffer(int level) {
+		buf.putInt(freeSpaceOff, nextPageOff + 4);
+		buf.putInt(tupleCountOff, 0);
+		buf.putInt(levelOff, level);
+		buf.putInt(nextPageOff, -1);
+	}
 
-    @Override
-    public int getNextPage() {
-        return buf.getInt(nextPageOff);
-    }
+	@Override
+	public int getNextPage() {
+		return buf.getInt(nextPageOff);
+	}
 
-    @Override
-    public void setNextPage(int nextPage) {
-        buf.putInt(nextPageOff, nextPage);
-    }
+	@Override
+	public void setNextPage(int nextPage) {
+		buf.putInt(nextPageOff, nextPage);
+	}
 }
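
The meta-data frame keeps its free-page entries as an int array growing from freeSpaceOff, and getFreePage() always pops the most recently added entry, so the list behaves as a LIFO stack. A self-contained illustration of that behavior on a plain ByteBuffer, reusing the same offsets (the 128-byte page size is arbitrary):

    import java.nio.ByteBuffer;

    public class LifoFreePageDemo {
        static final int TUPLE_COUNT_OFF = 0;
        static final int FREE_SPACE_OFF = TUPLE_COUNT_OFF + 4;
        static final int MAX_PAGE_OFF = FREE_SPACE_OFF + 4;
        static final int DUMMY_FIELD_OFF = MAX_PAGE_OFF + 4;
        static final int LEVEL_OFF = DUMMY_FIELD_OFF + 4;
        static final int NEXT_PAGE_OFF = LEVEL_OFF + 1;

        public static void main(String[] args) {
            ByteBuffer buf = ByteBuffer.allocate(128);
            // initBuffer: free-page entries start right after the header
            buf.putInt(FREE_SPACE_OFF, NEXT_PAGE_OFF + 4);
            buf.putInt(TUPLE_COUNT_OFF, 0);

            addFreePage(buf, 7);
            addFreePage(buf, 9);
            System.out.println(getFreePage(buf)); // 9 -- last added comes back first
            System.out.println(getFreePage(buf)); // 7
            System.out.println(getFreePage(buf)); // -1 -- list is empty
        }

        static void addFreePage(ByteBuffer buf, int freePage) {
            int freeSpace = buf.getInt(FREE_SPACE_OFF);
            buf.putInt(freeSpace, freePage);
            buf.putInt(FREE_SPACE_OFF, freeSpace + 4);
            buf.putInt(TUPLE_COUNT_OFF, buf.getInt(TUPLE_COUNT_OFF) + 1);
        }

        static int getFreePage(ByteBuffer buf) {
            int tupleCount = buf.getInt(TUPLE_COUNT_OFF);
            if (tupleCount == 0) {
                return -1;
            }
            int lastPageOff = buf.getInt(FREE_SPACE_OFF) - 4;
            buf.putInt(FREE_SPACE_OFF, lastPageOff);
            buf.putInt(TUPLE_COUNT_OFF, tupleCount - 1);
            return buf.getInt(lastPageOff);
        }
    }
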
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/LIFOMetaDataFrameFactory.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/LIFOMetaDataFrameFactory.java
index 409c8b2..68d1ee3 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/LIFOMetaDataFrameFactory.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/LIFOMetaDataFrameFactory.java
@@ -19,8 +19,8 @@
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
 
 public class LIFOMetaDataFrameFactory implements ITreeIndexMetaDataFrameFactory {
-    @Override
-    public ITreeIndexMetaDataFrame createFrame() {
-        return new LIFOMetaDataFrame();
-    }
+	@Override
+	public ITreeIndexMetaDataFrame createFrame() {
+		return new LIFOMetaDataFrame();
+	}
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/TreeIndexNSMFrame.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/TreeIndexNSMFrame.java
index af1e337..e5c37ff 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/TreeIndexNSMFrame.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/TreeIndexNSMFrame.java
@@ -38,287 +38,309 @@
 
 public abstract class TreeIndexNSMFrame implements ITreeIndexFrame {
 
-    protected static final int pageLsnOff = 0; // 0
-    protected static final int tupleCountOff = pageLsnOff + 4; // 4
-    protected static final int freeSpaceOff = tupleCountOff + 4; // 8
-    protected static final int totalFreeSpaceOff = freeSpaceOff + 4; // 16
-    protected static final byte levelOff = totalFreeSpaceOff + 4;
-    protected static final byte smFlagOff = levelOff + 1;
+	protected static final int pageLsnOff = 0; // 0
+	protected static final int tupleCountOff = pageLsnOff + 4; // 4
+	protected static final int freeSpaceOff = tupleCountOff + 4; // 8
+	protected static final int totalFreeSpaceOff = freeSpaceOff + 4; // 12
+	protected static final byte levelOff = totalFreeSpaceOff + 4;
+	protected static final byte smFlagOff = levelOff + 1;
 
-    protected ICachedPage page = null;
-    protected ByteBuffer buf = null;
-    protected ISlotManager slotManager;
+	protected ICachedPage page = null;
+	protected ByteBuffer buf = null;
+	protected ISlotManager slotManager;
 
-    protected ITreeIndexTupleWriter tupleWriter;
-    protected ITreeIndexTupleReference frameTuple;
+	protected ITreeIndexTupleWriter tupleWriter;
+	protected ITreeIndexTupleReference frameTuple;
 
-    public TreeIndexNSMFrame(ITreeIndexTupleWriter tupleWriter, ISlotManager slotManager) {
-        this.tupleWriter = tupleWriter;
-        this.frameTuple = tupleWriter.createTupleReference();
-        this.slotManager = slotManager;
-    }
+	public TreeIndexNSMFrame(ITreeIndexTupleWriter tupleWriter,
+			ISlotManager slotManager) {
+		this.tupleWriter = tupleWriter;
+		this.frameTuple = tupleWriter.createTupleReference();
+		this.slotManager = slotManager;
+	}
 
-    @Override
-    public void initBuffer(byte level) {
-        buf.putInt(pageLsnOff, 0); // TODO: might to set to a different lsn
-        // during creation
-        buf.putInt(tupleCountOff, 0);
-        resetSpaceParams();
-        buf.put(levelOff, level);
-        buf.put(smFlagOff, (byte) 0);
-    }
+	@Override
+	public void initBuffer(byte level) {
+		buf.putInt(pageLsnOff, 0); // TODO: might want to set to a different lsn
+		// during creation
+		buf.putInt(tupleCountOff, 0);
+		resetSpaceParams();
+		buf.put(levelOff, level);
+		buf.put(smFlagOff, (byte) 0);
+	}
 
-    @Override
-    public boolean isLeaf() {
-        return buf.get(levelOff) == 0;
-    }
+	@Override
+	public boolean isLeaf() {
+		return buf.get(levelOff) == 0;
+	}
 
-    @Override
-    public boolean isInterior() {
-        return buf.get(levelOff) > 0;
-    }
+	@Override
+	public boolean isInterior() {
+		return buf.get(levelOff) > 0;
+	}
 
-    @Override
-    public byte getLevel() {
-        return buf.get(levelOff);
-    }
+	@Override
+	public byte getLevel() {
+		return buf.get(levelOff);
+	}
 
-    @Override
-    public void setLevel(byte level) {
-        buf.put(levelOff, level);
-    }
+	@Override
+	public void setLevel(byte level) {
+		buf.put(levelOff, level);
+	}
 
-    @Override
-    public boolean getSmFlag() {
-        return buf.get(smFlagOff) != 0;
-    }
+	@Override
+	public boolean getSmFlag() {
+		return buf.get(smFlagOff) != 0;
+	}
 
-    @Override
-    public void setSmFlag(boolean smFlag) {
-        if (smFlag)
-            buf.put(smFlagOff, (byte) 1);
-        else
-            buf.put(smFlagOff, (byte) 0);
-    }
+	@Override
+	public void setSmFlag(boolean smFlag) {
+		if (smFlag)
+			buf.put(smFlagOff, (byte) 1);
+		else
+			buf.put(smFlagOff, (byte) 0);
+	}
 
-    @Override
-    public int getFreeSpaceOff() {
-        return buf.getInt(freeSpaceOff);
-    }
+	@Override
+	public int getFreeSpaceOff() {
+		return buf.getInt(freeSpaceOff);
+	}
 
-    @Override
-    public void setFreeSpaceOff(int freeSpace) {
-        buf.putInt(freeSpaceOff, freeSpace);
-    }
+	@Override
+	public void setFreeSpaceOff(int freeSpace) {
+		buf.putInt(freeSpaceOff, freeSpace);
+	}
 
-    @Override
-    public void setPage(ICachedPage page) {
-        this.page = page;
-        this.buf = page.getBuffer();
-        slotManager.setFrame(this);
-    }
+	@Override
+	public void setPage(ICachedPage page) {
+		this.page = page;
+		this.buf = page.getBuffer();
+		slotManager.setFrame(this);
+	}
 
-    @Override
-    public ByteBuffer getBuffer() {
-        return page.getBuffer();
-    }
+	@Override
+	public ByteBuffer getBuffer() {
+		return page.getBuffer();
+	}
 
-    @Override
-    public ICachedPage getPage() {
-        return page;
-    }
+	@Override
+	public ICachedPage getPage() {
+		return page;
+	}
 
-    @Override
-    public boolean compact(MultiComparator cmp) {
-        resetSpaceParams();
-        frameTuple.setFieldCount(cmp.getFieldCount());
+	@Override
+	public boolean compact(MultiComparator cmp) {
+		resetSpaceParams();
+		frameTuple.setFieldCount(cmp.getFieldCount());
 
-        int tupleCount = buf.getInt(tupleCountOff);
-        int freeSpace = buf.getInt(freeSpaceOff);
+		int tupleCount = buf.getInt(tupleCountOff);
+		int freeSpace = buf.getInt(freeSpaceOff);
 
-        ArrayList<SlotOffTupleOff> sortedTupleOffs = new ArrayList<SlotOffTupleOff>();
-        sortedTupleOffs.ensureCapacity(tupleCount);
-        for (int i = 0; i < tupleCount; i++) {
-            int slotOff = slotManager.getSlotOff(i);
-            int tupleOff = slotManager.getTupleOff(slotOff);
-            sortedTupleOffs.add(new SlotOffTupleOff(i, slotOff, tupleOff));
-        }
-        Collections.sort(sortedTupleOffs);
+		ArrayList<SlotOffTupleOff> sortedTupleOffs = new ArrayList<SlotOffTupleOff>();
+		sortedTupleOffs.ensureCapacity(tupleCount);
+		for (int i = 0; i < tupleCount; i++) {
+			int slotOff = slotManager.getSlotOff(i);
+			int tupleOff = slotManager.getTupleOff(slotOff);
+			sortedTupleOffs.add(new SlotOffTupleOff(i, slotOff, tupleOff));
+		}
+		Collections.sort(sortedTupleOffs);
 
-        for (int i = 0; i < sortedTupleOffs.size(); i++) {
-            int tupleOff = sortedTupleOffs.get(i).tupleOff;
-            frameTuple.resetByTupleOffset(buf, tupleOff);
+		for (int i = 0; i < sortedTupleOffs.size(); i++) {
+			int tupleOff = sortedTupleOffs.get(i).tupleOff;
+			frameTuple.resetByTupleOffset(buf, tupleOff);
 
-            int tupleEndOff = frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
-                    + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1);
-            int tupleLength = tupleEndOff - tupleOff;
-            System.arraycopy(buf.array(), tupleOff, buf.array(), freeSpace, tupleLength);
+			int tupleEndOff = frameTuple.getFieldStart(frameTuple
+					.getFieldCount() - 1)
+					+ frameTuple.getFieldLength(frameTuple.getFieldCount() - 1);
+			int tupleLength = tupleEndOff - tupleOff;
+			System.arraycopy(buf.array(), tupleOff, buf.array(), freeSpace,
+					tupleLength);
 
-            slotManager.setSlot(sortedTupleOffs.get(i).slotOff, freeSpace);
-            freeSpace += tupleLength;
-        }
+			slotManager.setSlot(sortedTupleOffs.get(i).slotOff, freeSpace);
+			freeSpace += tupleLength;
+		}
 
-        buf.putInt(freeSpaceOff, freeSpace);
-        buf.putInt(totalFreeSpaceOff, buf.capacity() - freeSpace - tupleCount * slotManager.getSlotSize());
+		buf.putInt(freeSpaceOff, freeSpace);
+		buf.putInt(totalFreeSpaceOff, buf.capacity() - freeSpace - tupleCount
+				* slotManager.getSlotSize());
 
-        return false;
-    }
+		return false;
+	}
 
-    @Override
-    public void delete(ITupleReference tuple, MultiComparator cmp, boolean exactDelete) throws Exception {
+	@Override
+	public void delete(ITupleReference tuple, MultiComparator cmp,
+			boolean exactDelete) throws Exception {
 
-        frameTuple.setFieldCount(cmp.getFieldCount());
-        int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_EXACT,
-                FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
-        int slotOff = slotManager.getSlotOff(tupleIndex);
-        if (tupleIndex < 0) {
-            throw new TreeIndexException("Key to be deleted does not exist.");
-        } else {
-            if (exactDelete) {
-                // check the non-key columns for equality by byte-by-byte
-                // comparison
-                int tupleOff = slotManager.getTupleOff(slotOff);
-                frameTuple.resetByTupleOffset(buf, tupleOff);
+		frameTuple.setFieldCount(cmp.getFieldCount());
+		int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp,
+				FindTupleMode.FTM_EXACT,
+				FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
+		int slotOff = slotManager.getSlotOff(tupleIndex);
+		if (tupleIndex < 0) {
+			throw new TreeIndexException("Key to be deleted does not exist.");
+		} else {
+			if (exactDelete) {
+				// check the non-key columns for equality by byte-by-byte
+				// comparison
+				int tupleOff = slotManager.getTupleOff(slotOff);
+				frameTuple.resetByTupleOffset(buf, tupleOff);
 
-                int comparison = cmp.fieldRangeCompare(tuple, frameTuple, cmp.getKeyFieldCount() - 1,
-                        cmp.getFieldCount() - cmp.getKeyFieldCount());
-                if (comparison != 0) {
-                    throw new TreeIndexException(
-                            "Cannot delete tuple. Byte-by-byte comparison failed to prove equality.");
-                }
-            }
+				int comparison = cmp.fieldRangeCompare(tuple, frameTuple,
+						cmp.getKeyFieldCount() - 1,
+						cmp.getFieldCount() - cmp.getKeyFieldCount());
+				if (comparison != 0) {
+					throw new TreeIndexException(
+							"Cannot delete tuple. Byte-by-byte comparison failed to prove equality.");
+				}
+			}
 
-            int tupleOff = slotManager.getTupleOff(slotOff);
-            frameTuple.resetByTupleOffset(buf, tupleOff);
-            int tupleSize = tupleWriter.bytesRequired(frameTuple);
+			int tupleOff = slotManager.getTupleOff(slotOff);
+			frameTuple.resetByTupleOffset(buf, tupleOff);
+			int tupleSize = tupleWriter.bytesRequired(frameTuple);
 
-            // perform deletion (we just do a memcpy to overwrite the slot)
-            int slotStartOff = slotManager.getSlotEndOff();
-            int length = slotOff - slotStartOff;
-            System.arraycopy(buf.array(), slotStartOff, buf.array(), slotStartOff + slotManager.getSlotSize(), length);
+			// perform deletion (we just do a memcpy to overwrite the slot)
+			int slotStartOff = slotManager.getSlotEndOff();
+			int length = slotOff - slotStartOff;
+			System.arraycopy(buf.array(), slotStartOff, buf.array(),
+					slotStartOff + slotManager.getSlotSize(), length);
 
-            // maintain space information
-            buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
-            buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + tupleSize + slotManager.getSlotSize());
-        }
-    }
+			// maintain space information
+			buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
+			buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff)
+					+ tupleSize + slotManager.getSlotSize());
+		}
+	}
 
-    @Override
-    public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple, MultiComparator cmp) {
-        int bytesRequired = tupleWriter.bytesRequired(tuple);
-        if (bytesRequired + slotManager.getSlotSize() <= buf.capacity() - buf.getInt(freeSpaceOff)
-                - (buf.getInt(tupleCountOff) * slotManager.getSlotSize()))
-            return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
-        else if (bytesRequired + slotManager.getSlotSize() <= buf.getInt(totalFreeSpaceOff))
-            return FrameOpSpaceStatus.SUFFICIENT_SPACE;
-        else
-            return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
-    }
+	@Override
+	public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple,
+			MultiComparator cmp) {
+		int bytesRequired = tupleWriter.bytesRequired(tuple);
+		if (bytesRequired + slotManager.getSlotSize() <= buf.capacity()
+				- buf.getInt(freeSpaceOff)
+				- (buf.getInt(tupleCountOff) * slotManager.getSlotSize()))
+			return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
+		else if (bytesRequired + slotManager.getSlotSize() <= buf
+				.getInt(totalFreeSpaceOff))
+			return FrameOpSpaceStatus.SUFFICIENT_SPACE;
+		else
+			return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
+	}
 
-    @Override
-    public FrameOpSpaceStatus hasSpaceUpdate(int rid, ITupleReference tuple, MultiComparator cmp) {
-        // TODO Auto-generated method stub
-        return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
-    }
+	@Override
+	public FrameOpSpaceStatus hasSpaceUpdate(int rid, ITupleReference tuple,
+			MultiComparator cmp) {
+		// TODO Auto-generated method stub
+		return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
+	}
 
-    protected void resetSpaceParams() {
-        buf.putInt(freeSpaceOff, smFlagOff + 1);
-        buf.putInt(totalFreeSpaceOff, buf.capacity() - (smFlagOff + 1));
-    }
+	protected void resetSpaceParams() {
+		buf.putInt(freeSpaceOff, smFlagOff + 1);
+		buf.putInt(totalFreeSpaceOff, buf.capacity() - (smFlagOff + 1));
+	}
 
-    @Override
-    public int findTupleIndex(ITupleReference tuple, MultiComparator cmp) throws Exception {
-        frameTuple.setFieldCount(cmp.getFieldCount());
-        return slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_INCLUSIVE,
-                FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
-    }
+	@Override
+	public int findTupleIndex(ITupleReference tuple, MultiComparator cmp)
+			throws Exception {
+		frameTuple.setFieldCount(cmp.getFieldCount());
+		return slotManager.findTupleIndex(tuple, frameTuple, cmp,
+				FindTupleMode.FTM_INCLUSIVE,
+				FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
+	}
 
-    @Override
-    public void insert(ITupleReference tuple, MultiComparator cmp, int tupleIndex) throws Exception {
-        slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
-        int bytesWritten = tupleWriter.writeTuple(tuple, buf, buf.getInt(freeSpaceOff));
-        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
-        buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
-        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
-    }
+	@Override
+	public void insert(ITupleReference tuple, MultiComparator cmp,
+			int tupleIndex) throws Exception {
+		slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
+		int bytesWritten = tupleWriter.writeTuple(tuple, buf,
+				buf.getInt(freeSpaceOff));
+		buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
+		buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
+		buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff)
+				- bytesWritten - slotManager.getSlotSize());
+	}
 
-    @Override
-    public void update(int rid, ITupleReference tuple) throws Exception {
-        // TODO Auto-generated method stub
+	@Override
+	public void update(int rid, ITupleReference tuple) throws Exception {
+		// TODO Auto-generated method stub
 
-    }
+	}
 
-    @Override
-    public void printHeader() {
-        // TODO Auto-generated method stub
+	@Override
+	public void printHeader() {
+		// TODO Auto-generated method stub
 
-    }
+	}
 
-    @Override
-    public int getTupleCount() {
-        return buf.getInt(tupleCountOff);
-    }
+	@Override
+	public int getTupleCount() {
+		return buf.getInt(tupleCountOff);
+	}
 
-    public ISlotManager getSlotManager() {
-        return slotManager;
-    }
+	public ISlotManager getSlotManager() {
+		return slotManager;
+	}
 
-    @Override
-    public String printKeys(MultiComparator cmp, ISerializerDeserializer[] fields) throws HyracksDataException {
-        StringBuilder strBuilder = new StringBuilder();
-        int tupleCount = buf.getInt(tupleCountOff);
-        frameTuple.setFieldCount(fields.length);
-        for (int i = 0; i < tupleCount; i++) {
-            frameTuple.resetByTupleIndex(this, i);
-            for (int j = 0; j < cmp.getKeyFieldCount(); j++) {
-                ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(j),
-                        frameTuple.getFieldStart(j), frameTuple.getFieldLength(j));
-                DataInput dataIn = new DataInputStream(inStream);
-                Object o = fields[j].deserialize(dataIn);
-                strBuilder.append(o.toString() + " ");
-            }
-            strBuilder.append(" | ");
-        }
-        strBuilder.append("\n");
-        return strBuilder.toString();
-    }
+	@Override
+	public String printKeys(MultiComparator cmp,
+			ISerializerDeserializer[] fields) throws HyracksDataException {
+		StringBuilder strBuilder = new StringBuilder();
+		int tupleCount = buf.getInt(tupleCountOff);
+		frameTuple.setFieldCount(fields.length);
+		for (int i = 0; i < tupleCount; i++) {
+			frameTuple.resetByTupleIndex(this, i);
+			for (int j = 0; j < cmp.getKeyFieldCount(); j++) {
+				ByteArrayInputStream inStream = new ByteArrayInputStream(
+						frameTuple.getFieldData(j),
+						frameTuple.getFieldStart(j),
+						frameTuple.getFieldLength(j));
+				DataInput dataIn = new DataInputStream(inStream);
+				Object o = fields[j].deserialize(dataIn);
+				strBuilder.append(o.toString() + " ");
+			}
+			strBuilder.append(" | ");
+		}
+		strBuilder.append("\n");
+		return strBuilder.toString();
+	}
 
-    @Override
-    public int getTupleOffset(int slotNum) {
-        return slotManager.getTupleOff(slotManager.getSlotStartOff() - slotNum * slotManager.getSlotSize());
-    }
+	@Override
+	public int getTupleOffset(int slotNum) {
+		return slotManager.getTupleOff(slotManager.getSlotStartOff() - slotNum
+				* slotManager.getSlotSize());
+	}
 
-    @Override
-    public int getPageLsn() {
-        return buf.getInt(pageLsnOff);
-    }
+	@Override
+	public int getPageLsn() {
+		return buf.getInt(pageLsnOff);
+	}
 
-    @Override
-    public void setPageLsn(int pageLsn) {
-        buf.putInt(pageLsnOff, pageLsn);
-    }
+	@Override
+	public void setPageLsn(int pageLsn) {
+		buf.putInt(pageLsnOff, pageLsn);
+	}
 
-    @Override
-    public int getTotalFreeSpace() {
-        return buf.getInt(totalFreeSpaceOff);
-    }
+	@Override
+	public int getTotalFreeSpace() {
+		return buf.getInt(totalFreeSpaceOff);
+	}
 
-    @Override
-    public boolean compress(MultiComparator cmp) {
-        return false;
-    }
+	@Override
+	public boolean compress(MultiComparator cmp) {
+		return false;
+	}
 
-    @Override
-    public int getSlotSize() {
-        return slotManager.getSlotSize();
-    }
+	@Override
+	public int getSlotSize() {
+		return slotManager.getSlotSize();
+	}
 
-    @Override
-    public void setPageTupleFieldCount(int fieldCount) {
-        frameTuple.setFieldCount(fieldCount);
-    }
+	@Override
+	public void setPageTupleFieldCount(int fieldCount) {
+		frameTuple.setFieldCount(fieldCount);
+	}
 
-    public ITreeIndexTupleWriter getTupleWriter() {
-        return tupleWriter;
-    }
+	public ITreeIndexTupleWriter getTupleWriter() {
+		return tupleWriter;
+	}
 }
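
hasSpaceInsert distinguishes contiguous free space (between the end of the tuple data at freeSpaceOff and the start of the slot array) from total free space (which also counts holes left by deleted tuples). A small standalone calculation with made-up numbers shows how the three outcomes arise:

    public class SpaceStatusDemo {
        enum Status { INSUFFICIENT_SPACE, SUFFICIENT_CONTIGUOUS_SPACE, SUFFICIENT_SPACE }

        public static void main(String[] args) {
            int capacity = 4096;       // page size
            int freeSpaceOff = 3000;   // first free byte after the tuple data
            int tupleCount = 100;
            int totalFreeSpace = 1200; // includes fragmented space from deletions
            int slotSize = 4;
            int bytesRequired = 800;   // size of the tuple to insert

            int contiguous = capacity - freeSpaceOff - tupleCount * slotSize; // 696
            Status status;
            if (bytesRequired + slotSize <= contiguous) {
                status = Status.SUFFICIENT_CONTIGUOUS_SPACE; // insert directly
            } else if (bytesRequired + slotSize <= totalFreeSpace) {
                status = Status.SUFFICIENT_SPACE;            // compact first, then insert
            } else {
                status = Status.INSUFFICIENT_SPACE;          // page must be split
            }
            System.out.println(status); // SUFFICIENT_SPACE: 804 > 696, but 804 <= 1200
        }
    }
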
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/freepage/LinkedListFreePageManager.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/freepage/LinkedListFreePageManager.java
index 42bf70f..f7e2e3b 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/freepage/LinkedListFreePageManager.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/freepage/LinkedListFreePageManager.java
@@ -10,178 +10,193 @@
 
 public class LinkedListFreePageManager implements IFreePageManager {
 
-    private static final byte META_PAGE_LEVEL_INDICATOR = -1;
-    private static final byte FREE_PAGE_LEVEL_INDICATOR = -2;
-    private final IBufferCache bufferCache;
-    private final int fileId;
-    private final int headPage;
-    private final ITreeIndexMetaDataFrameFactory metaDataFrameFactory;
+	private static final byte META_PAGE_LEVEL_INDICATOR = -1;
+	private static final byte FREE_PAGE_LEVEL_INDICATOR = -2;
+	private final IBufferCache bufferCache;
+	private final int fileId;
+	private final int headPage;
+	private final ITreeIndexMetaDataFrameFactory metaDataFrameFactory;
 
-    public LinkedListFreePageManager(IBufferCache bufferCache, int fileId, int headPage,
-            ITreeIndexMetaDataFrameFactory metaDataFrameFactory) {
-        this.bufferCache = bufferCache;
-        this.fileId = fileId;
-        this.headPage = headPage;
-        this.metaDataFrameFactory = metaDataFrameFactory;
-    }
+	public LinkedListFreePageManager(IBufferCache bufferCache, int fileId,
+			int headPage, ITreeIndexMetaDataFrameFactory metaDataFrameFactory) {
+		this.bufferCache = bufferCache;
+		this.fileId = fileId;
+		this.headPage = headPage;
+		this.metaDataFrameFactory = metaDataFrameFactory;
+	}
 
-    @Override
-    public void addFreePage(ITreeIndexMetaDataFrame metaFrame, int freePage) throws HyracksDataException {
+	@Override
+	public void addFreePage(ITreeIndexMetaDataFrame metaFrame, int freePage)
+			throws HyracksDataException {
 
-        ICachedPage metaNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, headPage), false);
-        metaNode.acquireWriteLatch();
+		ICachedPage metaNode = bufferCache.pin(
+				BufferedFileHandle.getDiskPageId(fileId, headPage), false);
+		metaNode.acquireWriteLatch();
 
-        try {
-            metaFrame.setPage(metaNode);
+		try {
+			metaFrame.setPage(metaNode);
 
-            if (metaFrame.hasSpace()) {
-                metaFrame.addFreePage(freePage);
-            } else {
-                // allocate a new page in the chain of meta pages
-                int newPage = metaFrame.getFreePage();
-                if (newPage < 0) {
-                    throw new Exception("Inconsistent Meta Page State. It has no space, but it also has no entries.");
-                }
+			if (metaFrame.hasSpace()) {
+				metaFrame.addFreePage(freePage);
+			} else {
+				// allocate a new page in the chain of meta pages
+				int newPage = metaFrame.getFreePage();
+				if (newPage < 0) {
+					throw new Exception(
+							"Inconsistent Meta Page State. It has no space, but it also has no entries.");
+				}
 
-                ICachedPage newNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, newPage), false);
-                newNode.acquireWriteLatch();
+				ICachedPage newNode = bufferCache.pin(
+						BufferedFileHandle.getDiskPageId(fileId, newPage),
+						false);
+				newNode.acquireWriteLatch();
 
-                try {
-                    int metaMaxPage = metaFrame.getMaxPage();
+				try {
+					int metaMaxPage = metaFrame.getMaxPage();
 
-                    // copy metaDataPage to newNode
-                    System.arraycopy(metaNode.getBuffer().array(), 0, newNode.getBuffer().array(), 0, metaNode
-                            .getBuffer().capacity());
+					// copy metaDataPage to newNode
+					System.arraycopy(metaNode.getBuffer().array(), 0, newNode
+							.getBuffer().array(), 0, metaNode.getBuffer()
+							.capacity());
 
-                    metaFrame.initBuffer(META_PAGE_LEVEL_INDICATOR);
-                    metaFrame.setNextPage(newPage);
-                    metaFrame.setMaxPage(metaMaxPage);
-                    metaFrame.addFreePage(freePage);
-                } finally {
-                    newNode.releaseWriteLatch();
-                    bufferCache.unpin(newNode);
-                }
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-        } finally {
-            metaNode.releaseWriteLatch();
-            bufferCache.unpin(metaNode);
-        }
-    }
+					metaFrame.initBuffer(META_PAGE_LEVEL_INDICATOR);
+					metaFrame.setNextPage(newPage);
+					metaFrame.setMaxPage(metaMaxPage);
+					metaFrame.addFreePage(freePage);
+				} finally {
+					newNode.releaseWriteLatch();
+					bufferCache.unpin(newNode);
+				}
+			}
+		} catch (Exception e) {
+			e.printStackTrace();
+		} finally {
+			metaNode.releaseWriteLatch();
+			bufferCache.unpin(metaNode);
+		}
+	}
 
-    @Override
-    public int getFreePage(ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
-        ICachedPage metaNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, headPage), false);
+	@Override
+	public int getFreePage(ITreeIndexMetaDataFrame metaFrame)
+			throws HyracksDataException {
+		ICachedPage metaNode = bufferCache.pin(
+				BufferedFileHandle.getDiskPageId(fileId, headPage), false);
 
-        metaNode.acquireWriteLatch();
+		metaNode.acquireWriteLatch();
 
-        int freePage = -1;
-        try {
-            metaFrame.setPage(metaNode);
-            freePage = metaFrame.getFreePage();
-            if (freePage < 0) { // no free page entry on this page
-                int nextPage = metaFrame.getNextPage();
-                if (nextPage > 0) { // sibling may have free pages
-                    ICachedPage nextNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, nextPage), false);
+		int freePage = -1;
+		try {
+			metaFrame.setPage(metaNode);
+			freePage = metaFrame.getFreePage();
+			if (freePage < 0) { // no free page entry on this page
+				int nextPage = metaFrame.getNextPage();
+				if (nextPage > 0) { // sibling may have free pages
+					ICachedPage nextNode = bufferCache.pin(
+							BufferedFileHandle.getDiskPageId(fileId, nextPage),
+							false);
 
-                    nextNode.acquireWriteLatch();
-                    // we copy over the free space entries of nextpage into the
-                    // first meta page (metaDataPage)
-                    // we need to link the first page properly to the next page
-                    // of nextpage
-                    try {
-                        // remember entries that remain unchanged
-                        int maxPage = metaFrame.getMaxPage();
+					nextNode.acquireWriteLatch();
+					// we copy over the free space entries of nextpage into the
+					// first meta page (metaDataPage)
+					// we need to link the first page properly to the next page
+					// of nextpage
+					try {
+						// remember entries that remain unchanged
+						int maxPage = metaFrame.getMaxPage();
 
-                        // copy entire page (including sibling pointer, free
-                        // page entries, and all other info)
-                        // after this copy nextPage is considered a free page
-                        System.arraycopy(nextNode.getBuffer().array(), 0, metaNode.getBuffer().array(), 0, nextNode
-                                .getBuffer().capacity());
+						// copy entire page (including sibling pointer, free
+						// page entries, and all other info)
+						// after this copy nextPage is considered a free page
+						System.arraycopy(nextNode.getBuffer().array(), 0,
+								metaNode.getBuffer().array(), 0, nextNode
+										.getBuffer().capacity());
 
-                        // reset unchanged entry
-                        metaFrame.setMaxPage(maxPage);
+						// reset unchanged entry
+						metaFrame.setMaxPage(maxPage);
 
-                        freePage = metaFrame.getFreePage();
-                        // sibling also has no free pages, this "should" not
-                        // happen, but we deal with it anyway just to be safe
-                        if (freePage < 0) {
-                            freePage = nextPage;
-                        } else {
-                            metaFrame.addFreePage(nextPage);
-                        }
-                    } finally {
-                        nextNode.releaseWriteLatch();
-                        bufferCache.unpin(nextNode);
-                    }
-                } else {
-                    freePage = metaFrame.getMaxPage();
-                    freePage++;
-                    metaFrame.setMaxPage(freePage);
-                }
-            }
-        } finally {
-            metaNode.releaseWriteLatch();
-            bufferCache.unpin(metaNode);
-        }
+						freePage = metaFrame.getFreePage();
+						// sibling also has no free pages, this "should" not
+						// happen, but we deal with it anyway just to be safe
+						if (freePage < 0) {
+							freePage = nextPage;
+						} else {
+							metaFrame.addFreePage(nextPage);
+						}
+					} finally {
+						nextNode.releaseWriteLatch();
+						bufferCache.unpin(nextNode);
+					}
+				} else {
+					freePage = metaFrame.getMaxPage();
+					freePage++;
+					metaFrame.setMaxPage(freePage);
+				}
+			}
+		} finally {
+			metaNode.releaseWriteLatch();
+			bufferCache.unpin(metaNode);
+		}
 
-        return freePage;
-    }
+		return freePage;
+	}
 
-    @Override
-    public int getMaxPage(ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
-        ICachedPage metaNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, headPage), false);
-        metaNode.acquireWriteLatch();
-        int maxPage = -1;
-        try {
-            metaFrame.setPage(metaNode);
-            maxPage = metaFrame.getMaxPage();
-        } finally {
-            metaNode.releaseWriteLatch();
-            bufferCache.unpin(metaNode);
-        }
-        return maxPage;
-    }
+	@Override
+	public int getMaxPage(ITreeIndexMetaDataFrame metaFrame)
+			throws HyracksDataException {
+		ICachedPage metaNode = bufferCache.pin(
+				BufferedFileHandle.getDiskPageId(fileId, headPage), false);
+		metaNode.acquireWriteLatch();
+		int maxPage = -1;
+		try {
+			metaFrame.setPage(metaNode);
+			maxPage = metaFrame.getMaxPage();
+		} finally {
+			metaNode.releaseWriteLatch();
+			bufferCache.unpin(metaNode);
+		}
+		return maxPage;
+	}
 
-    @Override
-    public void init(ITreeIndexMetaDataFrame metaFrame, int currentMaxPage) throws HyracksDataException {
-        // initialize meta data page
-        ICachedPage metaNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, headPage), true);
+	@Override
+	public void init(ITreeIndexMetaDataFrame metaFrame, int currentMaxPage)
+			throws HyracksDataException {
+		// initialize meta data page
+		ICachedPage metaNode = bufferCache.pin(
+				BufferedFileHandle.getDiskPageId(fileId, headPage), true);
 
-        metaNode.acquireWriteLatch();
-        try {
-            metaFrame.setPage(metaNode);
-            metaFrame.initBuffer(META_PAGE_LEVEL_INDICATOR);
-            metaFrame.setMaxPage(currentMaxPage);
-        } finally {
-            metaNode.releaseWriteLatch();
-            bufferCache.unpin(metaNode);
-        }
-    }
+		metaNode.acquireWriteLatch();
+		try {
+			metaFrame.setPage(metaNode);
+			metaFrame.initBuffer(META_PAGE_LEVEL_INDICATOR);
+			metaFrame.setMaxPage(currentMaxPage);
+		} finally {
+			metaNode.releaseWriteLatch();
+			bufferCache.unpin(metaNode);
+		}
+	}
 
-    @Override
-    public ITreeIndexMetaDataFrameFactory getMetaDataFrameFactory() {
-        return metaDataFrameFactory;
-    }
+	@Override
+	public ITreeIndexMetaDataFrameFactory getMetaDataFrameFactory() {
+		return metaDataFrameFactory;
+	}
 
-    @Override
-    public byte getFreePageLevelIndicator() {
-        return FREE_PAGE_LEVEL_INDICATOR;
-    }
+	@Override
+	public byte getFreePageLevelIndicator() {
+		return FREE_PAGE_LEVEL_INDICATOR;
+	}
 
-    @Override
-    public byte getMetaPageLevelIndicator() {
-        return META_PAGE_LEVEL_INDICATOR;
-    }
+	@Override
+	public byte getMetaPageLevelIndicator() {
+		return META_PAGE_LEVEL_INDICATOR;
+	}
 
-    @Override
-    public boolean isFreePage(ITreeIndexMetaDataFrame metaFrame) {
-        return metaFrame.getLevel() == FREE_PAGE_LEVEL_INDICATOR;
-    }
+	@Override
+	public boolean isFreePage(ITreeIndexMetaDataFrame metaFrame) {
+		return metaFrame.getLevel() == FREE_PAGE_LEVEL_INDICATOR;
+	}
 
-    @Override
-    public boolean isMetaPage(ITreeIndexMetaDataFrame metaFrame) {
-        return metaFrame.getLevel() == META_PAGE_LEVEL_INDICATOR;
-    }
+	@Override
+	public boolean isMetaPage(ITreeIndexMetaDataFrame metaFrame) {
+		return metaFrame.getLevel() == META_PAGE_LEVEL_INDICATOR;
+	}
 }
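
A hedged sketch of the manager's allocate/release cycle. The bufferCache, fileId, headPage, and currentMaxPage names are assumed to be set up by the surrounding index code; getFreePage either pops an entry from the meta-page chain or bumps maxPage when nothing has been freed yet.

    IFreePageManager freePageManager = new LinkedListFreePageManager(
            bufferCache, fileId, headPage, new LIFOMetaDataFrameFactory());
    ITreeIndexMetaDataFrame metaFrame =
            freePageManager.getMetaDataFrameFactory().createFrame();

    freePageManager.init(metaFrame, currentMaxPage);      // write the initial meta page
    int newPage = freePageManager.getFreePage(metaFrame); // reuse a freed page, or maxPage + 1
    // ... use newPage for a tree node; when the node is dropped, recycle it:
    freePageManager.addFreePage(metaFrame, newPage);
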
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/TreeDiskOrderScanCursor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/TreeDiskOrderScanCursor.java
index 2c2cb5e..01b91b1 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/TreeDiskOrderScanCursor.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/TreeDiskOrderScanCursor.java
@@ -28,123 +28,128 @@
 
 public class TreeDiskOrderScanCursor implements ITreeIndexCursor {
 
-    private int tupleIndex = 0;
-    private int fileId = -1;
-    int currentPageId = -1;
-    int maxPageId = -1;
-    private ICachedPage page = null;
-    private ITreeIndexFrame frame = null;
-    private IBufferCache bufferCache = null;
+	private int tupleIndex = 0;
+	private int fileId = -1;
+	int currentPageId = -1;
+	int maxPageId = -1;
+	private ICachedPage page = null;
+	private ITreeIndexFrame frame = null;
+	private IBufferCache bufferCache = null;
 
-    private ITreeIndexTupleReference frameTuple;
+	private ITreeIndexTupleReference frameTuple;
 
-    public TreeDiskOrderScanCursor(ITreeIndexFrame frame) {
-        this.frame = frame;
-        this.frameTuple = frame.getTupleWriter().createTupleReference();
-    }
+	public TreeDiskOrderScanCursor(ITreeIndexFrame frame) {
+		this.frame = frame;
+		this.frameTuple = frame.getTupleWriter().createTupleReference();
+	}
 
-    @Override
-    public void close() throws Exception {
-        page.releaseReadLatch();
-        bufferCache.unpin(page);
-        page = null;
-    }
+	@Override
+	public void close() throws Exception {
+		page.releaseReadLatch();
+		bufferCache.unpin(page);
+		page = null;
+	}
 
-    @Override
-    public ITreeIndexTupleReference getTuple() {
-        return frameTuple;
-    }
+	@Override
+	public ITreeIndexTupleReference getTuple() {
+		return frameTuple;
+	}
 
-    @Override
-    public ICachedPage getPage() {
-        return page;
-    }
+	@Override
+	public ICachedPage getPage() {
+		return page;
+	}
 
-    private boolean positionToNextLeaf(boolean skipCurrent) throws HyracksDataException {
-        while ((frame.getLevel() != 0 || skipCurrent) && (currentPageId <= maxPageId) || (frame.getTupleCount() == 0)) {
-            currentPageId++;
+	private boolean positionToNextLeaf(boolean skipCurrent)
+			throws HyracksDataException {
+		while ((frame.getLevel() != 0 || skipCurrent)
+				&& (currentPageId <= maxPageId) || (frame.getTupleCount() == 0)) {
+			currentPageId++;
 
-            ICachedPage nextPage = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, currentPageId), false);
-            nextPage.acquireReadLatch();
+			ICachedPage nextPage = bufferCache.pin(
+					BufferedFileHandle.getDiskPageId(fileId, currentPageId),
+					false);
+			nextPage.acquireReadLatch();
 
-            page.releaseReadLatch();
-            bufferCache.unpin(page);
+			page.releaseReadLatch();
+			bufferCache.unpin(page);
 
-            page = nextPage;
-            frame.setPage(page);
-            tupleIndex = 0;
-            skipCurrent = false;
-        }
-        if (currentPageId <= maxPageId)
-            return true;
-        else
-            return false;
-    }
+			page = nextPage;
+			frame.setPage(page);
+			tupleIndex = 0;
+			skipCurrent = false;
+		}
+		if (currentPageId <= maxPageId)
+			return true;
+		else
+			return false;
+	}
 
-    @Override
-    public boolean hasNext() throws Exception {
-        if (tupleIndex >= frame.getTupleCount()) {
-            boolean nextLeafExists = positionToNextLeaf(true);
-            if (nextLeafExists) {
-                frameTuple.resetByTupleIndex(frame, tupleIndex);
-                return true;
-            } else {
-                return false;
-            }
-        }
+	@Override
+	public boolean hasNext() throws Exception {
+		if (tupleIndex >= frame.getTupleCount()) {
+			boolean nextLeafExists = positionToNextLeaf(true);
+			if (nextLeafExists) {
+				frameTuple.resetByTupleIndex(frame, tupleIndex);
+				return true;
+			} else {
+				return false;
+			}
+		}
 
-        frameTuple.resetByTupleIndex(frame, tupleIndex);
-        return true;
-    }
+		frameTuple.resetByTupleIndex(frame, tupleIndex);
+		return true;
+	}
 
-    @Override
-    public void next() throws Exception {
-        tupleIndex++;
-    }
+	@Override
+	public void next() throws Exception {
+		tupleIndex++;
+	}
 
-    @Override
-    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
-        // in case open is called multiple times without closing
-        if (page != null) {
-            page.releaseReadLatch();
-            bufferCache.unpin(page);
-        }
+	@Override
+	public void open(ICursorInitialState initialState,
+			ISearchPredicate searchPred) throws HyracksDataException {
+		// in case open is called multiple times without closing
+		if (page != null) {
+			page.releaseReadLatch();
+			bufferCache.unpin(page);
+		}
 
-        page = initialState.getPage();
-        tupleIndex = 0;
-        frame.setPage(page);
-        MultiComparator lowKeyCmp = searchPred.getLowKeyComparator();
-        frameTuple.setFieldCount(lowKeyCmp.getFieldCount());
-        boolean leafExists = positionToNextLeaf(false);
-        if (!leafExists) {
-            throw new HyracksDataException(
-                    "Failed to open disk-order scan cursor for tree index. Traget tree index has no leaves.");
-        }
-    }
+		page = initialState.getPage();
+		tupleIndex = 0;
+		frame.setPage(page);
+		MultiComparator lowKeyCmp = searchPred.getLowKeyComparator();
+		frameTuple.setFieldCount(lowKeyCmp.getFieldCount());
+		boolean leafExists = positionToNextLeaf(false);
+		if (!leafExists) {
+			throw new HyracksDataException(
+					"Failed to open disk-order scan cursor for tree index. Traget tree index has no leaves.");
+		}
+	}
 
-    @Override
-    public void reset() {
-        tupleIndex = 0;
-        currentPageId = -1;
-        maxPageId = -1;
-        page = null;
-    }
+	@Override
+	public void reset() {
+		tupleIndex = 0;
+		currentPageId = -1;
+		maxPageId = -1;
+		page = null;
+	}
 
-    @Override
-    public void setBufferCache(IBufferCache bufferCache) {
-        this.bufferCache = bufferCache;
-    }
+	@Override
+	public void setBufferCache(IBufferCache bufferCache) {
+		this.bufferCache = bufferCache;
+	}
 
-    @Override
-    public void setFileId(int fileId) {
-        this.fileId = fileId;
-    }
+	@Override
+	public void setFileId(int fileId) {
+		this.fileId = fileId;
+	}
 
-    public void setCurrentPageId(int currentPageId) {
-        this.currentPageId = currentPageId;
-    }
+	public void setCurrentPageId(int currentPageId) {
+		this.currentPageId = currentPageId;
+	}
 
-    public void setMaxPageId(int maxPageId) {
-        this.maxPageId = maxPageId;
-    }
+	public void setMaxPageId(int maxPageId) {
+		this.maxPageId = maxPageId;
+	}
 }
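
A hedged fragment showing how the disk-order scan cursor is typically driven. The leafFrame, bufferCache, fileId, startPageId, maxPageId, initialState, and searchPred values are assumed to come from an already-opened tree index (the op helper's createDiskOrderScanCursor earlier in this patch constructs the same cursor), and the fragment sits in a method that may throw Exception.

    TreeDiskOrderScanCursor cursor = new TreeDiskOrderScanCursor(leafFrame);
    cursor.setBufferCache(bufferCache);
    cursor.setFileId(fileId);
    cursor.setCurrentPageId(startPageId); // the scan advances from the page after this one
    cursor.setMaxPageId(maxPageId);       // e.g. from the free page manager's getMaxPage
    cursor.open(initialState, searchPred);
    try {
        while (cursor.hasNext()) {
            cursor.next();
            ITreeIndexTupleReference tuple = cursor.getTuple();
            // ... consume the tuple ...
        }
    } finally {
        cursor.close();
    }
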
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FindTupleMode.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FindTupleMode.java
index cea2500..11ac257 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FindTupleMode.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FindTupleMode.java
@@ -16,5 +16,5 @@
 package edu.uci.ics.hyracks.storage.am.common.ophelpers;
 
 public enum FindTupleMode {
-    FTM_INCLUSIVE, FTM_EXCLUSIVE, FTM_EXACT
+	FTM_INCLUSIVE, FTM_EXCLUSIVE, FTM_EXACT
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FindTupleNoExactMatchPolicy.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FindTupleNoExactMatchPolicy.java
index 0d534ed..b8f3c9a 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FindTupleNoExactMatchPolicy.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FindTupleNoExactMatchPolicy.java
@@ -16,5 +16,5 @@
 package edu.uci.ics.hyracks.storage.am.common.ophelpers;
 
 public enum FindTupleNoExactMatchPolicy {
-    FTP_LOWER_KEY, FTP_HIGHER_KEY
+	FTP_LOWER_KEY, FTP_HIGHER_KEY
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IndexOp.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IndexOp.java
index e40c5c8..780acd8 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IndexOp.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IndexOp.java
@@ -16,5 +16,5 @@
 package edu.uci.ics.hyracks.storage.am.common.ophelpers;
 
 public enum IndexOp {
-    INSERT, DELETE, UPDATE, SEARCH, DISKORDERSCAN
+	INSERT, DELETE, UPDATE, SEARCH, DISKORDERSCAN
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IndexOpContext.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IndexOpContext.java
index 4f6e656..9122174 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IndexOpContext.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IndexOpContext.java
@@ -1,5 +1,5 @@
 package edu.uci.ics.hyracks.storage.am.common.ophelpers;
 
 public interface IndexOpContext {
-    void reset();
+	void reset();
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IntArrayList.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IntArrayList.java
index 46551dd..d888aa0 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IntArrayList.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IntArrayList.java
@@ -16,74 +16,74 @@
 package edu.uci.ics.hyracks.storage.am.common.ophelpers;
 
 public class IntArrayList {
-    private int[] data;
-    private int size;
-    private int first;
-    private final int growth;
+	private int[] data;
+	private int size;
+	private int first;
+	private final int growth;
 
-    public IntArrayList(int initialCapacity, int growth) {
-        data = new int[initialCapacity];
-        size = 0;
-        first = 0;
-        this.growth = growth;
-    }
+	public IntArrayList(int initialCapacity, int growth) {
+		data = new int[initialCapacity];
+		size = 0;
+		first = 0;
+		this.growth = growth;
+	}
 
-    public int size() {
-        return size;
-    }
+	public int size() {
+		return size;
+	}
 
-    public int first() {
-        return first;
-    }
+	public int first() {
+		return first;
+	}
 
-    public void add(int i) {
-        if (size == data.length) {
-            int[] newData = new int[data.length + growth];
-            System.arraycopy(data, 0, newData, 0, data.length);
-            data = newData;
-        }
+	public void add(int i) {
+		if (size == data.length) {
+			int[] newData = new int[data.length + growth];
+			System.arraycopy(data, 0, newData, 0, data.length);
+			data = newData;
+		}
 
-        data[size++] = i;
-    }
+		data[size++] = i;
+	}
 
-    public void removeLast() {
-        if (size > 0)
-            size--;
-    }
+	public void removeLast() {
+		if (size > 0)
+			size--;
+	}
 
-    // WARNING: caller is responsible for checking size > 0
-    public int getLast() {
-        return data[size - 1];
-    }
+	// WARNING: caller is responsible for checking size > 0
+	public int getLast() {
+		return data[size - 1];
+	}
 
-    public int get(int i) {
-        return data[i];
-    }
+	public int get(int i) {
+		return data[i];
+	}
 
-    // WARNING: caller is responsible for checking i < size
-    public void set(int i, int value) {
-        data[i] = value;
+	// WARNING: caller is responsible for checking i < size
+	public void set(int i, int value) {
+		data[i] = value;
 
-    }
+	}
 
-    public int getFirst() {
-        return data[first];
-    }
+	public int getFirst() {
+		return data[first];
+	}
 
-    public void moveFirst() {
-        first++;
-    }
+	public void moveFirst() {
+		first++;
+	}
 
-    public void clear() {
-        size = 0;
-        first = 0;
-    }
+	public void clear() {
+		size = 0;
+		first = 0;
+	}
 
-    public boolean isLast() {
-        return size == first;
-    }
+	public boolean isLast() {
+		return size == first;
+	}
 
-    public boolean isEmpty() {
-        return size == 0;
-    }
+	public boolean isEmpty() {
+		return size == 0;
+	}
 }
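
Note: IntArrayList (above) is a growable primitive-int list with stack-style getLast()/removeLast() and a moveFirst() cursor. The following is a minimal usage sketch, not part of this patch; it uses only the methods visible in the diff and compiles only with hyracks-storage-am-common on the classpath. The class name IntArrayListSketch and the sample page ids are illustrative.

    // Hypothetical usage sketch: IntArrayList as a page-id stack during a tree descent.
    import edu.uci.ics.hyracks.storage.am.common.ophelpers.IntArrayList;

    public class IntArrayListSketch {
        public static void main(String[] args) {
            IntArrayList pageIds = new IntArrayList(8, 8); // initialCapacity, growth
            pageIds.add(0);   // root page
            pageIds.add(3);   // interior page
            pageIds.add(17);  // leaf page
            while (!pageIds.isEmpty()) {
                int pageId = pageIds.getLast(); // caller must ensure size > 0
                System.out.println("unwinding page " + pageId);
                pageIds.removeLast();
            }
        }
    }
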
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/MultiComparator.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/MultiComparator.java
index 1842bf8..a07ef03 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/MultiComparator.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/MultiComparator.java
@@ -25,78 +25,100 @@
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
 import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
 
 public class MultiComparator {
 
-    private static final long serialVersionUID = 1L;
+	private static final long serialVersionUID = 1L;
 
-    private IBinaryComparator[] cmps = null;
-    private ITypeTrait[] typeTraits;
+	private IBinaryComparator[] cmps = null;
+	private ITypeTrait[] typeTraits;
+	private IPrimitiveValueProvider[] valueProviders = null;
 
-    private IBinaryComparator intCmp = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+	private IBinaryComparator intCmp = IntegerBinaryComparatorFactory.INSTANCE
+			.createBinaryComparator();
 
-    public IBinaryComparator getIntCmp() {
-        return intCmp;
-    }
+	public IBinaryComparator getIntCmp() {
+		return intCmp;
+	}
 
-    public MultiComparator(ITypeTrait[] typeTraits, IBinaryComparator[] cmps) {
-        this.typeTraits = typeTraits;
-        this.cmps = cmps;
-    }
+	public MultiComparator(ITypeTrait[] typeTraits, IBinaryComparator[] cmps) {
+		this.typeTraits = typeTraits;
+		this.cmps = cmps;
+	}
 
-    public int compare(ITupleReference tupleA, ITupleReference tupleB) {
-        for (int i = 0; i < cmps.length; i++) {
-            int cmp = cmps[i].compare(tupleA.getFieldData(i), tupleA.getFieldStart(i), tupleA.getFieldLength(i),
-                    tupleB.getFieldData(i), tupleB.getFieldStart(i), tupleB.getFieldLength(i));
-            if (cmp < 0)
-                return -1;
-            else if (cmp > 0)
-                return 1;
-        }
-        return 0;
-    }
+	public MultiComparator(ITypeTrait[] typeTraits, IBinaryComparator[] cmps,
+			IPrimitiveValueProvider[] valueProviders) {
+		this.typeTraits = typeTraits;
+		this.cmps = cmps;
+		this.valueProviders = valueProviders;
+	}
 
-    public int fieldRangeCompare(ITupleReference tupleA, ITupleReference tupleB, int startFieldIndex, int numFields) {
-        for (int i = startFieldIndex; i < startFieldIndex + numFields; i++) {
-            int cmp = cmps[i].compare(tupleA.getFieldData(i), tupleA.getFieldStart(i), tupleA.getFieldLength(i),
-                    tupleB.getFieldData(i), tupleB.getFieldStart(i), tupleB.getFieldLength(i));
-            if (cmp < 0)
-                return -1;
-            else if (cmp > 0)
-                return 1;
-        }
-        return 0;
-    }
+	public int compare(ITupleReference tupleA, ITupleReference tupleB) {
+		for (int i = 0; i < cmps.length; i++) {
+			int cmp = cmps[i].compare(tupleA.getFieldData(i),
+					tupleA.getFieldStart(i), tupleA.getFieldLength(i),
+					tupleB.getFieldData(i), tupleB.getFieldStart(i),
+					tupleB.getFieldLength(i));
+			if (cmp < 0)
+				return -1;
+			else if (cmp > 0)
+				return 1;
+		}
+		return 0;
+	}
 
-    public String printTuple(ITupleReference tuple, ISerializerDeserializer[] fields) throws HyracksDataException {
-        StringBuilder strBuilder = new StringBuilder();
-        for (int i = 0; i < tuple.getFieldCount(); i++) {
-            ByteArrayInputStream inStream = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
-                    tuple.getFieldLength(i));
-            DataInput dataIn = new DataInputStream(inStream);
-            Object o = fields[i].deserialize(dataIn);
-            strBuilder.append(o.toString() + " ");
-        }
-        return strBuilder.toString();
-    }
+	public int fieldRangeCompare(ITupleReference tupleA,
+			ITupleReference tupleB, int startFieldIndex, int numFields) {
+		for (int i = startFieldIndex; i < startFieldIndex + numFields; i++) {
+			int cmp = cmps[i].compare(tupleA.getFieldData(i),
+					tupleA.getFieldStart(i), tupleA.getFieldLength(i),
+					tupleB.getFieldData(i), tupleB.getFieldStart(i),
+					tupleB.getFieldLength(i));
+			if (cmp < 0)
+				return -1;
+			else if (cmp > 0)
+				return 1;
+		}
+		return 0;
+	}
 
-    public IBinaryComparator[] getComparators() {
-        return cmps;
-    }
+	public String printTuple(ITupleReference tuple,
+			ISerializerDeserializer[] fields) throws HyracksDataException {
+		StringBuilder strBuilder = new StringBuilder();
+		for (int i = 0; i < tuple.getFieldCount(); i++) {
+			ByteArrayInputStream inStream = new ByteArrayInputStream(
+					tuple.getFieldData(i), tuple.getFieldStart(i),
+					tuple.getFieldLength(i));
+			DataInput dataIn = new DataInputStream(inStream);
+			Object o = fields[i].deserialize(dataIn);
+			strBuilder.append(o.toString() + " ");
+		}
+		return strBuilder.toString();
+	}
 
-    public int getKeyFieldCount() {
-        return cmps.length;
-    }
+	public IBinaryComparator[] getComparators() {
+		return cmps;
+	}
 
-    public void setComparators(IBinaryComparator[] cmps) {
-        this.cmps = cmps;
-    }
+	public int getKeyFieldCount() {
+		return cmps.length;
+	}
 
-    public int getFieldCount() {
-        return typeTraits.length;
-    }
+	public void setComparators(IBinaryComparator[] cmps) {
+		this.cmps = cmps;
+	}
 
-    public ITypeTrait[] getTypeTraits() {
-        return typeTraits;
-    }
-}
+	public int getFieldCount() {
+		return typeTraits.length;
+	}
+
+	public ITypeTrait[] getTypeTraits() {
+		return typeTraits;
+	}
+
+	public IPrimitiveValueProvider[] getValueProviders() {
+		return valueProviders;
+	}
+
+}
\ No newline at end of file
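
Note: beyond reformatting, the substantive change to MultiComparator is the optional IPrimitiveValueProvider[] plus the getValueProviders() accessor; the two-argument constructor leaves the providers null, so only callers that use the new three-argument constructor (e.g. the r-tree) can rely on a non-null result. Below is a standalone sketch, not Hyracks API, of the field-by-field compare loop using plain java.util.Comparator in place of IBinaryComparator; the class name MultiCompareSketch and the sample tuples are hypothetical.

    // Return on the first non-equal key field, normalizing to -1/0/1 exactly
    // like MultiComparator.compare() above.
    import java.util.Comparator;
    import java.util.List;

    public class MultiCompareSketch {
        static int compare(Object[] tupleA, Object[] tupleB, List<Comparator<Object>> cmps) {
            for (int i = 0; i < cmps.size(); i++) {
                int c = cmps.get(i).compare(tupleA[i], tupleB[i]);
                if (c < 0) return -1;
                if (c > 0) return 1;
            }
            return 0; // all key fields equal
        }

        public static void main(String[] args) {
            Comparator<Object> byString = Comparator.comparing(Object::toString);
            // first field ties, second field decides: prints -1
            System.out.println(compare(new Object[] { "a", 1 }, new Object[] { "a", 2 },
                    List.of(byString, byString)));
        }
    }
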
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/SlotOffTupleOff.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/SlotOffTupleOff.java
index 4fc1861..35231c2 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/SlotOffTupleOff.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/SlotOffTupleOff.java
@@ -16,18 +16,18 @@
 package edu.uci.ics.hyracks.storage.am.common.ophelpers;
 
 public class SlotOffTupleOff implements Comparable<SlotOffTupleOff> {
-    public int tupleIndex;
-    public int slotOff;
-    public int tupleOff;
+	public int tupleIndex;
+	public int slotOff;
+	public int tupleOff;
 
-    public SlotOffTupleOff(int tupleIndex, int slotOff, int recOff) {
-        this.tupleIndex = tupleIndex;
-        this.slotOff = slotOff;
-        this.tupleOff = recOff;
-    }
+	public SlotOffTupleOff(int tupleIndex, int slotOff, int recOff) {
+		this.tupleIndex = tupleIndex;
+		this.slotOff = slotOff;
+		this.tupleOff = recOff;
+	}
 
-    @Override
-    public int compareTo(SlotOffTupleOff o) {
-        return tupleOff - o.tupleOff;
-    }
+	@Override
+	public int compareTo(SlotOffTupleOff o) {
+		return tupleOff - o.tupleOff;
+	}
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleReference.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleReference.java
index f9e00ac..353bd95 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleReference.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleReference.java
@@ -22,73 +22,77 @@
 
 public class SimpleTupleReference implements ITreeIndexTupleReference {
 
-    protected ByteBuffer buf;
-    protected int fieldStartIndex;
-    protected int fieldCount;
-    protected int tupleStartOff;
-    protected int nullFlagsBytes;
-    protected int fieldSlotsBytes;
+	protected ByteBuffer buf;
+	protected int fieldStartIndex;
+	protected int fieldCount;
+	protected int tupleStartOff;
+	protected int nullFlagsBytes;
+	protected int fieldSlotsBytes;
 
-    @Override
-    public void resetByTupleOffset(ByteBuffer buf, int tupleStartOff) {
-        this.buf = buf;
-        this.tupleStartOff = tupleStartOff;
-    }
+	@Override
+	public void resetByTupleOffset(ByteBuffer buf, int tupleStartOff) {
+		this.buf = buf;
+		this.tupleStartOff = tupleStartOff;
+	}
 
-    @Override
-    public void resetByTupleIndex(ITreeIndexFrame frame, int tupleIndex) {
-        resetByTupleOffset(frame.getBuffer(), frame.getTupleOffset(tupleIndex));
-    }
+	@Override
+	public void resetByTupleIndex(ITreeIndexFrame frame, int tupleIndex) {
+		resetByTupleOffset(frame.getBuffer(), frame.getTupleOffset(tupleIndex));
+	}
 
-    @Override
-    public void setFieldCount(int fieldCount) {
-        this.fieldCount = fieldCount;
-        nullFlagsBytes = getNullFlagsBytes();
-        fieldSlotsBytes = getFieldSlotsBytes();
-        fieldStartIndex = 0;
-    }
+	@Override
+	public void setFieldCount(int fieldCount) {
+		this.fieldCount = fieldCount;
+		nullFlagsBytes = getNullFlagsBytes();
+		fieldSlotsBytes = getFieldSlotsBytes();
+		fieldStartIndex = 0;
+	}
 
-    @Override
-    public void setFieldCount(int fieldStartIndex, int fieldCount) {
-        this.fieldCount = fieldCount;
-        this.fieldStartIndex = fieldStartIndex;
-    }
+	@Override
+	public void setFieldCount(int fieldStartIndex, int fieldCount) {
+		this.fieldCount = fieldCount;
+		this.fieldStartIndex = fieldStartIndex;
+	}
 
-    @Override
-    public int getFieldCount() {
-        return fieldCount;
-    }
+	@Override
+	public int getFieldCount() {
+		return fieldCount;
+	}
 
-    @Override
-    public byte[] getFieldData(int fIdx) {
-        return buf.array();
-    }
+	@Override
+	public byte[] getFieldData(int fIdx) {
+		return buf.array();
+	}
 
-    @Override
-    public int getFieldLength(int fIdx) {
-        if (fIdx == 0) {
-            return buf.getShort(tupleStartOff + nullFlagsBytes);
-        } else {
-            return buf.getShort(tupleStartOff + nullFlagsBytes + fIdx * 2)
-                    - buf.getShort(tupleStartOff + nullFlagsBytes + ((fIdx - 1) * 2));
-        }
-    }
+	@Override
+	public int getFieldLength(int fIdx) {
+		if (fIdx == 0) {
+			return buf.getShort(tupleStartOff + nullFlagsBytes);
+		} else {
+			return buf.getShort(tupleStartOff + nullFlagsBytes + fIdx * 2)
+					- buf.getShort(tupleStartOff + nullFlagsBytes
+							+ ((fIdx - 1) * 2));
+		}
+	}
 
-    @Override
-    public int getFieldStart(int fIdx) {
-        if (fIdx == 0) {
-            return tupleStartOff + nullFlagsBytes + fieldSlotsBytes;
-        } else {
-            return tupleStartOff + nullFlagsBytes + fieldSlotsBytes
-                    + buf.getShort(tupleStartOff + nullFlagsBytes + ((fIdx - 1) * 2));
-        }
-    }
+	@Override
+	public int getFieldStart(int fIdx) {
+		if (fIdx == 0) {
+			return tupleStartOff + nullFlagsBytes + fieldSlotsBytes;
+		} else {
+			return tupleStartOff
+					+ nullFlagsBytes
+					+ fieldSlotsBytes
+					+ buf.getShort(tupleStartOff + nullFlagsBytes
+							+ ((fIdx - 1) * 2));
+		}
+	}
 
-    protected int getNullFlagsBytes() {
-        return (int) Math.ceil(fieldCount / 8.0);
-    }
+	protected int getNullFlagsBytes() {
+		return (int) Math.ceil(fieldCount / 8.0);
+	}
 
-    protected int getFieldSlotsBytes() {
-        return fieldCount * 2;
-    }
+	protected int getFieldSlotsBytes() {
+		return fieldCount * 2;
+	}
 }
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriter.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriter.java
index 1730c4a..11f7820 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriter.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriter.java
@@ -23,88 +23,95 @@
 
 public class SimpleTupleWriter implements ITreeIndexTupleWriter {
 
-    @Override
-    public int bytesRequired(ITupleReference tuple) {
-        int bytes = getNullFlagsBytes(tuple) + getFieldSlotsBytes(tuple);
-        for (int i = 0; i < tuple.getFieldCount(); i++) {
-            bytes += tuple.getFieldLength(i);
-        }
-        return bytes;
-    }
+	@Override
+	public int bytesRequired(ITupleReference tuple) {
+		int bytes = getNullFlagsBytes(tuple) + getFieldSlotsBytes(tuple);
+		for (int i = 0; i < tuple.getFieldCount(); i++) {
+			bytes += tuple.getFieldLength(i);
+		}
+		return bytes;
+	}
 
-    @Override
-    public int bytesRequired(ITupleReference tuple, int startField, int numFields) {
-        int bytes = getNullFlagsBytes(tuple, startField, numFields) + getFieldSlotsBytes(tuple, startField, numFields);
-        for (int i = startField; i < startField + numFields; i++) {
-            bytes += tuple.getFieldLength(i);
-        }
-        return bytes;
-    }
+	@Override
+	public int bytesRequired(ITupleReference tuple, int startField,
+			int numFields) {
+		int bytes = getNullFlagsBytes(tuple, startField, numFields)
+				+ getFieldSlotsBytes(tuple, startField, numFields);
+		for (int i = startField; i < startField + numFields; i++) {
+			bytes += tuple.getFieldLength(i);
+		}
+		return bytes;
+	}
 
-    @Override
-    public ITreeIndexTupleReference createTupleReference() {
-        return new SimpleTupleReference();
-    }
+	@Override
+	public ITreeIndexTupleReference createTupleReference() {
+		return new SimpleTupleReference();
+	}
 
-    @Override
-    public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf, int targetOff) {
-        int runner = targetOff;
-        int nullFlagsBytes = getNullFlagsBytes(tuple);
-        int fieldSlotsBytes = getFieldSlotsBytes(tuple);
-        for (int i = 0; i < nullFlagsBytes; i++) {
-            targetBuf.put(runner++, (byte) 0);
-        }
-        runner += fieldSlotsBytes;
+	@Override
+	public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf,
+			int targetOff) {
+		int runner = targetOff;
+		int nullFlagsBytes = getNullFlagsBytes(tuple);
+		int fieldSlotsBytes = getFieldSlotsBytes(tuple);
+		for (int i = 0; i < nullFlagsBytes; i++) {
+			targetBuf.put(runner++, (byte) 0);
+		}
+		runner += fieldSlotsBytes;
 
-        int fieldEndOff = 0;
-        for (int i = 0; i < tuple.getFieldCount(); i++) {
-            System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner,
-                    tuple.getFieldLength(i));
-            fieldEndOff += tuple.getFieldLength(i);
-            runner += tuple.getFieldLength(i);
-            targetBuf.putShort(targetOff + nullFlagsBytes + i * 2, (short) fieldEndOff);
-        }
+		int fieldEndOff = 0;
+		for (int i = 0; i < tuple.getFieldCount(); i++) {
+			System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i),
+					targetBuf.array(), runner, tuple.getFieldLength(i));
+			fieldEndOff += tuple.getFieldLength(i);
+			runner += tuple.getFieldLength(i);
+			targetBuf.putShort(targetOff + nullFlagsBytes + i * 2,
+					(short) fieldEndOff);
+		}
 
-        return runner - targetOff;
-    }
+		return runner - targetOff;
+	}
 
-    @Override
-    public int writeTupleFields(ITupleReference tuple, int startField, int numFields, ByteBuffer targetBuf,
-            int targetOff) {
-        int runner = targetOff;
-        int nullFlagsBytes = getNullFlagsBytes(tuple, startField, numFields);
-        for (int i = 0; i < nullFlagsBytes; i++) {
-            targetBuf.put(runner++, (byte) 0);
-        }
-        runner += getFieldSlotsBytes(tuple, startField, numFields);
+	@Override
+	public int writeTupleFields(ITupleReference tuple, int startField,
+			int numFields, ByteBuffer targetBuf, int targetOff) {
+		int runner = targetOff;
+		int nullFlagsBytes = getNullFlagsBytes(tuple, startField, numFields);
+		for (int i = 0; i < nullFlagsBytes; i++) {
+			targetBuf.put(runner++, (byte) 0);
+		}
+		runner += getFieldSlotsBytes(tuple, startField, numFields);
 
-        int fieldEndOff = 0;
-        int fieldCounter = 0;
-        for (int i = startField; i < startField + numFields; i++) {
-            System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner,
-                    tuple.getFieldLength(i));
-            fieldEndOff += tuple.getFieldLength(i);
-            runner += tuple.getFieldLength(i);
-            targetBuf.putShort(targetOff + nullFlagsBytes + fieldCounter * 2, (short) fieldEndOff);
-            fieldCounter++;
-        }
+		int fieldEndOff = 0;
+		int fieldCounter = 0;
+		for (int i = startField; i < startField + numFields; i++) {
+			System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i),
+					targetBuf.array(), runner, tuple.getFieldLength(i));
+			fieldEndOff += tuple.getFieldLength(i);
+			runner += tuple.getFieldLength(i);
+			targetBuf.putShort(targetOff + nullFlagsBytes + fieldCounter * 2,
+					(short) fieldEndOff);
+			fieldCounter++;
+		}
 
-        return runner - targetOff;
-    }
+		return runner - targetOff;
+	}
 
-    protected int getNullFlagsBytes(ITupleReference tuple) {
-        return (int) Math.ceil((double) tuple.getFieldCount() / 8.0);
-    }
+	protected int getNullFlagsBytes(ITupleReference tuple) {
+		return (int) Math.ceil((double) tuple.getFieldCount() / 8.0);
+	}
 
-    protected int getFieldSlotsBytes(ITupleReference tuple) {
-        return tuple.getFieldCount() * 2;
-    }
+	protected int getFieldSlotsBytes(ITupleReference tuple) {
+		return tuple.getFieldCount() * 2;
+	}
 
-    protected int getNullFlagsBytes(ITupleReference tuple, int startField, int numFields) {
-        return (int) Math.ceil((double) numFields / 8.0);
-    }
+	protected int getNullFlagsBytes(ITupleReference tuple, int startField,
+			int numFields) {
+		return (int) Math.ceil((double) numFields / 8.0);
+	}
 
-    protected int getFieldSlotsBytes(ITupleReference tuple, int startField, int numFields) {
-        return numFields * 2;
-    }
+	protected int getFieldSlotsBytes(ITupleReference tuple, int startField,
+			int numFields) {
+		return numFields * 2;
+	}
 }
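
Note: SimpleTupleWriter and SimpleTupleReference (above) encode a tuple as null-flag bytes (one bit per field), then one 2-byte field-end offset per field, then the concatenated field data. The following standalone sketch, not part of this patch, reproduces that layout arithmetic with made-up field values; SimpleTupleLayoutSketch is an illustrative name.

    // [null flags][2-byte field-end offsets][field data], as in SimpleTupleWriter.
    import java.nio.ByteBuffer;

    public class SimpleTupleLayoutSketch {
        public static void main(String[] args) {
            byte[][] fields = { "key".getBytes(), "value".getBytes() };
            int nullFlagsBytes = (int) Math.ceil(fields.length / 8.0);
            int fieldSlotsBytes = fields.length * 2;

            int dataBytes = 0;
            for (byte[] f : fields) dataBytes += f.length;
            ByteBuffer buf = ByteBuffer.allocate(nullFlagsBytes + fieldSlotsBytes + dataBytes);

            int runner = nullFlagsBytes + fieldSlotsBytes; // data starts after flags + slots
            int fieldEndOff = 0;
            for (int i = 0; i < fields.length; i++) {
                buf.position(runner);
                buf.put(fields[i]);
                runner += fields[i].length;
                fieldEndOff += fields[i].length;
                buf.putShort(nullFlagsBytes + i * 2, (short) fieldEndOff); // end-offset slot
            }

            // Read field 1 back, mirroring SimpleTupleReference.getFieldStart/getFieldLength:
            int start1 = nullFlagsBytes + fieldSlotsBytes + buf.getShort(nullFlagsBytes);
            int len1 = buf.getShort(nullFlagsBytes + 2) - buf.getShort(nullFlagsBytes);
            System.out.println(new String(buf.array(), start1, len1)); // prints "value"
        }
    }
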
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriterFactory.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriterFactory.java
index ebb2905..10d4c3a 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriterFactory.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriterFactory.java
@@ -20,11 +20,11 @@
 
 public class SimpleTupleWriterFactory implements ITreeIndexTupleWriterFactory {
 
-    private static final long serialVersionUID = 1L;
+	private static final long serialVersionUID = 1L;
 
-    @Override
-    public ITreeIndexTupleWriter createTupleWriter() {
-        return new SimpleTupleWriter();
-    }
+	@Override
+	public ITreeIndexTupleWriter createTupleWriter() {
+		return new SimpleTupleWriter();
+	}
 
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleReference.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleReference.java
index 4f571b3..31b32e6 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleReference.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleReference.java
@@ -22,99 +22,99 @@
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
 
 public class TypeAwareTupleReference implements ITreeIndexTupleReference {
-    protected ByteBuffer buf;
-    protected int fieldStartIndex;
-    protected int fieldCount;
-    protected int tupleStartOff;
-    protected int nullFlagsBytes;
-    protected int dataStartOff;
+	protected ByteBuffer buf;
+	protected int fieldStartIndex;
+	protected int fieldCount;
+	protected int tupleStartOff;
+	protected int nullFlagsBytes;
+	protected int dataStartOff;
 
-    protected ITypeTrait[] typeTraits;
-    protected VarLenIntEncoderDecoder encDec = new VarLenIntEncoderDecoder();
-    protected int[] decodedFieldSlots;
+	protected ITypeTrait[] typeTraits;
+	protected VarLenIntEncoderDecoder encDec = new VarLenIntEncoderDecoder();
+	protected int[] decodedFieldSlots;
 
-    public TypeAwareTupleReference(ITypeTrait[] typeTraits) {
-        this.typeTraits = typeTraits;
-        this.fieldStartIndex = 0;
-    }
+	public TypeAwareTupleReference(ITypeTrait[] typeTraits) {
+		this.typeTraits = typeTraits;
+		this.fieldStartIndex = 0;
+	}
 
-    @Override
-    public void resetByTupleOffset(ByteBuffer buf, int tupleStartOff) {
-        this.buf = buf;
-        this.tupleStartOff = tupleStartOff;
+	@Override
+	public void resetByTupleOffset(ByteBuffer buf, int tupleStartOff) {
+		this.buf = buf;
+		this.tupleStartOff = tupleStartOff;
 
-        // decode field slots
-        int field = 0;
-        int cumul = 0;
-        int end = fieldStartIndex + fieldCount;
-        encDec.reset(buf.array(), tupleStartOff + nullFlagsBytes);
-        for (int i = fieldStartIndex; i < end; i++) {
-            int staticDataLen = typeTraits[i].getStaticallyKnownDataLength();
-            if (staticDataLen == ITypeTrait.VARIABLE_LENGTH) {
-                cumul += encDec.decode();
-                decodedFieldSlots[field++] = cumul;
-            } else {
-                cumul += staticDataLen;
-                decodedFieldSlots[field++] = cumul;
-            }
-        }
-        dataStartOff = encDec.getPos();
-    }
+		// decode field slots
+		int field = 0;
+		int cumul = 0;
+		int end = fieldStartIndex + fieldCount;
+		encDec.reset(buf.array(), tupleStartOff + nullFlagsBytes);
+		for (int i = fieldStartIndex; i < end; i++) {
+			int staticDataLen = typeTraits[i].getStaticallyKnownDataLength();
+			if (staticDataLen == ITypeTrait.VARIABLE_LENGTH) {
+				cumul += encDec.decode();
+				decodedFieldSlots[field++] = cumul;
+			} else {
+				cumul += staticDataLen;
+				decodedFieldSlots[field++] = cumul;
+			}
+		}
+		dataStartOff = encDec.getPos();
+	}
 
-    @Override
-    public void resetByTupleIndex(ITreeIndexFrame frame, int tupleIndex) {
-        resetByTupleOffset(frame.getBuffer(), frame.getTupleOffset(tupleIndex));
-    }
+	@Override
+	public void resetByTupleIndex(ITreeIndexFrame frame, int tupleIndex) {
+		resetByTupleOffset(frame.getBuffer(), frame.getTupleOffset(tupleIndex));
+	}
 
-    @Override
-    public void setFieldCount(int fieldCount) {
-        this.fieldCount = fieldCount;
-        if (decodedFieldSlots == null) {
-            decodedFieldSlots = new int[fieldCount];
-        } else {
-            if (fieldCount > decodedFieldSlots.length) {
-                decodedFieldSlots = new int[fieldCount];
-            }
-        }
-        nullFlagsBytes = getNullFlagsBytes();
-        this.fieldStartIndex = 0;
-    }
+	@Override
+	public void setFieldCount(int fieldCount) {
+		this.fieldCount = fieldCount;
+		if (decodedFieldSlots == null) {
+			decodedFieldSlots = new int[fieldCount];
+		} else {
+			if (fieldCount > decodedFieldSlots.length) {
+				decodedFieldSlots = new int[fieldCount];
+			}
+		}
+		nullFlagsBytes = getNullFlagsBytes();
+		this.fieldStartIndex = 0;
+	}
 
-    @Override
-    public void setFieldCount(int fieldStartIndex, int fieldCount) {
-        setFieldCount(fieldCount);
-        this.fieldStartIndex = fieldStartIndex;
-    }
+	@Override
+	public void setFieldCount(int fieldStartIndex, int fieldCount) {
+		setFieldCount(fieldCount);
+		this.fieldStartIndex = fieldStartIndex;
+	}
 
-    @Override
-    public int getFieldCount() {
-        return fieldCount;
-    }
+	@Override
+	public int getFieldCount() {
+		return fieldCount;
+	}
 
-    @Override
-    public byte[] getFieldData(int fIdx) {
-        return buf.array();
-    }
+	@Override
+	public byte[] getFieldData(int fIdx) {
+		return buf.array();
+	}
 
-    @Override
-    public int getFieldLength(int fIdx) {
-        if (fIdx == 0) {
-            return decodedFieldSlots[0];
-        } else {
-            return decodedFieldSlots[fIdx] - decodedFieldSlots[fIdx - 1];
-        }
-    }
+	@Override
+	public int getFieldLength(int fIdx) {
+		if (fIdx == 0) {
+			return decodedFieldSlots[0];
+		} else {
+			return decodedFieldSlots[fIdx] - decodedFieldSlots[fIdx - 1];
+		}
+	}
 
-    @Override
-    public int getFieldStart(int fIdx) {
-        if (fIdx == 0) {
-            return dataStartOff;
-        } else {
-            return dataStartOff + decodedFieldSlots[fIdx - 1];
-        }
-    }
+	@Override
+	public int getFieldStart(int fIdx) {
+		if (fIdx == 0) {
+			return dataStartOff;
+		} else {
+			return dataStartOff + decodedFieldSlots[fIdx - 1];
+		}
+	}
 
-    protected int getNullFlagsBytes() {
-        return (int) Math.ceil(fieldCount / 8.0);
-    }
+	protected int getNullFlagsBytes() {
+		return (int) Math.ceil(fieldCount / 8.0);
+	}
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriter.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriter.java
index 81b48e5..95468d4 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriter.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriter.java
@@ -24,125 +24,131 @@
 
 public class TypeAwareTupleWriter implements ITreeIndexTupleWriter {
 
-    protected ITypeTrait[] typeTraits;
-    protected VarLenIntEncoderDecoder encDec = new VarLenIntEncoderDecoder();
+	protected ITypeTrait[] typeTraits;
+	protected VarLenIntEncoderDecoder encDec = new VarLenIntEncoderDecoder();
 
-    public TypeAwareTupleWriter(ITypeTrait[] typeTraits) {
-        this.typeTraits = typeTraits;
-    }
+	public TypeAwareTupleWriter(ITypeTrait[] typeTraits) {
+		this.typeTraits = typeTraits;
+	}
 
-    @Override
-    public int bytesRequired(ITupleReference tuple) {
-        int bytes = getNullFlagsBytes(tuple) + getFieldSlotsBytes(tuple);
-        for (int i = 0; i < tuple.getFieldCount(); i++) {
-            bytes += tuple.getFieldLength(i);
-        }
-        return bytes;
-    }
+	@Override
+	public int bytesRequired(ITupleReference tuple) {
+		int bytes = getNullFlagsBytes(tuple) + getFieldSlotsBytes(tuple);
+		for (int i = 0; i < tuple.getFieldCount(); i++) {
+			bytes += tuple.getFieldLength(i);
+		}
+		return bytes;
+	}
 
-    @Override
-    public int bytesRequired(ITupleReference tuple, int startField, int numFields) {
-        int bytes = getNullFlagsBytes(numFields) + getFieldSlotsBytes(tuple, startField, numFields);
-        for (int i = startField; i < startField + numFields; i++) {
-            bytes += tuple.getFieldLength(i);
-        }
-        return bytes;
-    }
+	@Override
+	public int bytesRequired(ITupleReference tuple, int startField,
+			int numFields) {
+		int bytes = getNullFlagsBytes(numFields)
+				+ getFieldSlotsBytes(tuple, startField, numFields);
+		for (int i = startField; i < startField + numFields; i++) {
+			bytes += tuple.getFieldLength(i);
+		}
+		return bytes;
+	}
 
-    @Override
-    public ITreeIndexTupleReference createTupleReference() {
-        return new TypeAwareTupleReference(typeTraits);
-    }
+	@Override
+	public ITreeIndexTupleReference createTupleReference() {
+		return new TypeAwareTupleReference(typeTraits);
+	}
 
-    @Override
-    public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf, int targetOff) {
-        int runner = targetOff;
-        int nullFlagsBytes = getNullFlagsBytes(tuple);
-        // write null indicator bits
-        for (int i = 0; i < nullFlagsBytes; i++) {
-            targetBuf.put(runner++, (byte) 0);
-        }
+	@Override
+	public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf,
+			int targetOff) {
+		int runner = targetOff;
+		int nullFlagsBytes = getNullFlagsBytes(tuple);
+		// write null indicator bits
+		for (int i = 0; i < nullFlagsBytes; i++) {
+			targetBuf.put(runner++, (byte) 0);
+		}
 
-        // write field slots for variable length fields
-        encDec.reset(targetBuf.array(), runner);
-        for (int i = 0; i < tuple.getFieldCount(); i++) {
-            if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
-                encDec.encode(tuple.getFieldLength(i));
-            }
-        }
-        runner = encDec.getPos();
+		// write field slots for variable length fields
+		encDec.reset(targetBuf.array(), runner);
+		for (int i = 0; i < tuple.getFieldCount(); i++) {
+			if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
+				encDec.encode(tuple.getFieldLength(i));
+			}
+		}
+		runner = encDec.getPos();
 
-        // write data fields
-        for (int i = 0; i < tuple.getFieldCount(); i++) {
-            System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner,
-                    tuple.getFieldLength(i));
-            runner += tuple.getFieldLength(i);
-        }
+		// write data fields
+		for (int i = 0; i < tuple.getFieldCount(); i++) {
+			System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i),
+					targetBuf.array(), runner, tuple.getFieldLength(i));
+			runner += tuple.getFieldLength(i);
+		}
 
-        return runner - targetOff;
-    }
+		return runner - targetOff;
+	}
 
-    @Override
-    public int writeTupleFields(ITupleReference tuple, int startField, int numFields, ByteBuffer targetBuf,
-            int targetOff) {
-        int runner = targetOff;
-        int nullFlagsBytes = getNullFlagsBytes(numFields);
-        // write null indicator bits
-        for (int i = 0; i < nullFlagsBytes; i++) {
-            targetBuf.put(runner++, (byte) 0);
-        }
+	@Override
+	public int writeTupleFields(ITupleReference tuple, int startField,
+			int numFields, ByteBuffer targetBuf, int targetOff) {
+		int runner = targetOff;
+		int nullFlagsBytes = getNullFlagsBytes(numFields);
+		// write null indicator bits
+		for (int i = 0; i < nullFlagsBytes; i++) {
+			targetBuf.put(runner++, (byte) 0);
+		}
 
-        // write field slots for variable length fields
-        encDec.reset(targetBuf.array(), runner);
-        for (int i = startField; i < startField + numFields; i++) {
-            if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
-                encDec.encode(tuple.getFieldLength(i));
-            }
-        }
-        runner = encDec.getPos();
+		// write field slots for variable length fields
+		encDec.reset(targetBuf.array(), runner);
+		for (int i = startField; i < startField + numFields; i++) {
+			if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
+				encDec.encode(tuple.getFieldLength(i));
+			}
+		}
+		runner = encDec.getPos();
 
-        for (int i = startField; i < startField + numFields; i++) {
-            System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner,
-                    tuple.getFieldLength(i));
-            runner += tuple.getFieldLength(i);
-        }
+		for (int i = startField; i < startField + numFields; i++) {
+			System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i),
+					targetBuf.array(), runner, tuple.getFieldLength(i));
+			runner += tuple.getFieldLength(i);
+		}
 
-        return runner - targetOff;
-    }
+		return runner - targetOff;
+	}
 
-    protected int getNullFlagsBytes(ITupleReference tuple) {
-        return (int) Math.ceil((double) tuple.getFieldCount() / 8.0);
-    }
+	protected int getNullFlagsBytes(ITupleReference tuple) {
+		return (int) Math.ceil((double) tuple.getFieldCount() / 8.0);
+	}
 
-    protected int getFieldSlotsBytes(ITupleReference tuple) {
-        int fieldSlotBytes = 0;
-        for (int i = 0; i < tuple.getFieldCount(); i++) {
-            if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
-                fieldSlotBytes += encDec.getBytesRequired(tuple.getFieldLength(i));
-            }
-        }
-        return fieldSlotBytes;
-    }
+	protected int getFieldSlotsBytes(ITupleReference tuple) {
+		int fieldSlotBytes = 0;
+		for (int i = 0; i < tuple.getFieldCount(); i++) {
+			if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
+				fieldSlotBytes += encDec.getBytesRequired(tuple
+						.getFieldLength(i));
+			}
+		}
+		return fieldSlotBytes;
+	}
 
-    protected int getNullFlagsBytes(int numFields) {
-        return (int) Math.ceil((double) numFields / 8.0);
-    }
+	protected int getNullFlagsBytes(int numFields) {
+		return (int) Math.ceil((double) numFields / 8.0);
+	}
 
-    protected int getFieldSlotsBytes(ITupleReference tuple, int startField, int numFields) {
-        int fieldSlotBytes = 0;
-        for (int i = startField; i < startField + numFields; i++) {
-            if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
-                fieldSlotBytes += encDec.getBytesRequired(tuple.getFieldLength(i));
-            }
-        }
-        return fieldSlotBytes;
-    }
+	protected int getFieldSlotsBytes(ITupleReference tuple, int startField,
+			int numFields) {
+		int fieldSlotBytes = 0;
+		for (int i = startField; i < startField + numFields; i++) {
+			if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
+				fieldSlotBytes += encDec.getBytesRequired(tuple
+						.getFieldLength(i));
+			}
+		}
+		return fieldSlotBytes;
+	}
 
-    public ITypeTrait[] getTypeTraits() {
-        return typeTraits;
-    }
+	public ITypeTrait[] getTypeTraits() {
+		return typeTraits;
+	}
 
-    public void setTypeTraits(ITypeTrait[] typeTraits) {
-        this.typeTraits = typeTraits;
-    }
+	public void setTypeTraits(ITypeTrait[] typeTraits) {
+		this.typeTraits = typeTraits;
+	}
 }
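
Note: the type-aware format above only materializes a length slot (as a variable-byte integer) for fields whose type trait reports VARIABLE_LENGTH; fixed-length fields are reconstructed from the traits at decode time. A standalone sketch of the slot-size computation follows; it is not part of this patch, and the VARIABLE constant merely stands in for ITypeTrait.VARIABLE_LENGTH.

    // Only variable-length fields get a varint length slot.
    public class TypeAwareSlotSketch {
        static final int VARIABLE = -1; // stands in for ITypeTrait.VARIABLE_LENGTH

        static int varintBytes(int val) {          // mirrors getBytesRequired()
            int count = 1;
            while (val > 0x7F) { val >>>= 7; count++; }
            return count;
        }

        static int fieldSlotsBytes(int[] staticLengths, int[] actualLengths) {
            int bytes = 0;
            for (int i = 0; i < staticLengths.length; i++) {
                if (staticLengths[i] == VARIABLE) {
                    bytes += varintBytes(actualLengths[i]);
                }
                // fixed-length fields need no slot at all
            }
            return bytes;
        }

        public static void main(String[] args) {
            int[] staticLengths = { 4, VARIABLE, 8 };   // e.g. int, string, double
            int[] actualLengths = { 4, 300, 8 };
            System.out.println(fieldSlotsBytes(staticLengths, actualLengths)); // prints 2
        }
    }
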
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriterFactory.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriterFactory.java
index 82072ae..28e2535 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriterFactory.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriterFactory.java
@@ -19,18 +19,19 @@
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
 
-public class TypeAwareTupleWriterFactory implements ITreeIndexTupleWriterFactory {
+public class TypeAwareTupleWriterFactory implements
+		ITreeIndexTupleWriterFactory {
 
-    private static final long serialVersionUID = 1L;
-    private ITypeTrait[] typeTraits;
+	private static final long serialVersionUID = 1L;
+	private ITypeTrait[] typeTraits;
 
-    public TypeAwareTupleWriterFactory(ITypeTrait[] typeTraits) {
-        this.typeTraits = typeTraits;
-    }
+	public TypeAwareTupleWriterFactory(ITypeTrait[] typeTraits) {
+		this.typeTraits = typeTraits;
+	}
 
-    @Override
-    public ITreeIndexTupleWriter createTupleWriter() {
-        return new TypeAwareTupleWriter(typeTraits);
-    }
+	@Override
+	public ITreeIndexTupleWriter createTupleWriter() {
+		return new TypeAwareTupleWriter(typeTraits);
+	}
 
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/VarLenIntEncoderDecoder.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/VarLenIntEncoderDecoder.java
index d266d41..979bbd3 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/VarLenIntEncoderDecoder.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/VarLenIntEncoderDecoder.java
@@ -18,71 +18,71 @@
 // encodes positive integers in a variable-byte format
 
 public class VarLenIntEncoderDecoder {
-    public static final int ENCODE_MASK = 0x0000007F;
-    public static final byte CONTINUE_CHUNK = (byte) 0x80;
-    public static final byte DECODE_MASK = (byte) 0x7F;
+	public static final int ENCODE_MASK = 0x0000007F;
+	public static final byte CONTINUE_CHUNK = (byte) 0x80;
+	public static final byte DECODE_MASK = (byte) 0x7F;
 
-    private byte[] encTmp = new byte[5];
+	private byte[] encTmp = new byte[5];
 
-    private int pos;
-    private byte[] bytes;
+	private int pos;
+	private byte[] bytes;
 
-    public void reset(byte[] bytes, int pos) {
-        this.bytes = bytes;
-        this.pos = pos;
-    }
+	public void reset(byte[] bytes, int pos) {
+		this.bytes = bytes;
+		this.pos = pos;
+	}
 
-    public int encode(int val) {
-        int origPos = 0;
-        int tmpPos = 0;
-        while (val > ENCODE_MASK) {
-            encTmp[tmpPos++] = (byte) (val & ENCODE_MASK);
-            val = val >>> 7;
-        }
-        encTmp[tmpPos++] = (byte) (val);
+	public int encode(int val) {
+		int origPos = 0;
+		int tmpPos = 0;
+		while (val > ENCODE_MASK) {
+			encTmp[tmpPos++] = (byte) (val & ENCODE_MASK);
+			val = val >>> 7;
+		}
+		encTmp[tmpPos++] = (byte) (val);
 
-        // reverse order to optimize for decoding speed
-        for (int i = 0; i < tmpPos - 1; i++) {
-            bytes[pos++] = (byte) (encTmp[tmpPos - 1 - i] | CONTINUE_CHUNK);
-        }
-        bytes[pos++] = encTmp[0];
+		// reverse order to optimize for decoding speed
+		for (int i = 0; i < tmpPos - 1; i++) {
+			bytes[pos++] = (byte) (encTmp[tmpPos - 1 - i] | CONTINUE_CHUNK);
+		}
+		bytes[pos++] = encTmp[0];
 
-        return pos - origPos;
-    }
+		return pos - origPos;
+	}
 
-    public int decode() {
-        int sum = 0;
-        while ((bytes[pos] & CONTINUE_CHUNK) == CONTINUE_CHUNK) {
-            sum = (sum + (bytes[pos] & DECODE_MASK)) << 7;
-            pos++;
-        }
-        sum += bytes[pos++];
-        return sum;
-    }
+	public int decode() {
+		int sum = 0;
+		while ((bytes[pos] & CONTINUE_CHUNK) == CONTINUE_CHUNK) {
+			sum = (sum + (bytes[pos] & DECODE_MASK)) << 7;
+			pos++;
+		}
+		sum += bytes[pos++];
+		return sum;
+	}
 
-    // calculate the number of bytes needed for encoding
-    public int getBytesRequired(int val) {
-        int byteCount = 0;
-        while (val > ENCODE_MASK) {
-            val = val >>> 7;
-            byteCount++;
-        }
-        return byteCount + 1;
-    }
+	// calculate the number of bytes needed for encoding
+	public int getBytesRequired(int val) {
+		int byteCount = 0;
+		while (val > ENCODE_MASK) {
+			val = val >>> 7;
+			byteCount++;
+		}
+		return byteCount + 1;
+	}
 
-    public int getPos() {
-        return pos;
-    }
+	public int getPos() {
+		return pos;
+	}
 
-    // fast encoding, slow decoding version
-    /*
-     * public void encode(int val) { while(val > ENCODE_MASK) { bytes[pos++] =
-     * (byte)(((byte)(val & ENCODE_MASK)) | CONTINUE_CHUNK); val = val >>> 7; }
-     * bytes[pos++] = (byte)(val); }
-     * 
-     * public int decode() { int sum = 0; int shift = 0; while( (bytes[pos] &
-     * CONTINUE_CHUNK) == CONTINUE_CHUNK) { sum = (sum + (bytes[pos] &
-     * DECODE_MASK)) << 7 * shift++; pos++; } sum += bytes[pos++] << 7 * shift;
-     * return sum; }
-     */
+	// fast encoding, slow decoding version
+	/*
+	 * public void encode(int val) { while(val > ENCODE_MASK) { bytes[pos++] =
+	 * (byte)(((byte)(val & ENCODE_MASK)) | CONTINUE_CHUNK); val = val >>> 7; }
+	 * bytes[pos++] = (byte)(val); }
+	 * 
+	 * public int decode() { int sum = 0; int shift = 0; while( (bytes[pos] &
+	 * CONTINUE_CHUNK) == CONTINUE_CHUNK) { sum = (sum + (bytes[pos] &
+	 * DECODE_MASK)) << 7 * shift++; pos++; } sum += bytes[pos++] << 7 * shift;
+	 * return sum; }
+	 */
 }
\ No newline at end of file
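
Note: VarLenIntEncoderDecoder writes a positive int in 7-bit chunks, most-significant chunk first, setting the 0x80 continuation bit on every byte except the last so the decoder can stop at the first byte without it. The round-trip sketch below reimplements the same scheme standalone (it is illustrative, not the Hyracks class itself).

    // 7-bit variable-length encoding, high-order chunks first.
    public class VarIntSketch {
        static int encode(int val, byte[] out, int pos) {
            byte[] tmp = new byte[5];
            int n = 0;
            while (val > 0x7F) { tmp[n++] = (byte) (val & 0x7F); val >>>= 7; }
            tmp[n++] = (byte) val;
            // emit in reverse chunk order, continuation bit on all but the last byte
            for (int i = n - 1; i > 0; i--) out[pos++] = (byte) (tmp[i] | 0x80);
            out[pos++] = tmp[0];
            return pos;
        }

        static int decode(byte[] in, int pos) {
            int sum = 0;
            while ((in[pos] & 0x80) == 0x80) {
                sum = (sum + (in[pos] & 0x7F)) << 7;
                pos++;
            }
            return sum + in[pos];
        }

        public static void main(String[] args) {
            byte[] buf = new byte[5];
            encode(300, buf, 0);
            System.out.println(decode(buf, 0)); // prints 300
        }
    }
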
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexBufferCacheWarmup.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexBufferCacheWarmup.java
index eb261da..8179d58 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexBufferCacheWarmup.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexBufferCacheWarmup.java
@@ -13,72 +13,76 @@
 import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
 
 public class TreeIndexBufferCacheWarmup {
-    private final IBufferCache bufferCache;
-    private final IFreePageManager freePageManager;
-    private final int fileId;
-    private final ArrayList<IntArrayList> pagesByLevel = new ArrayList<IntArrayList>();
-    private final Random rnd = new Random();
+	private final IBufferCache bufferCache;
+	private final IFreePageManager freePageManager;
+	private final int fileId;
+	private final ArrayList<IntArrayList> pagesByLevel = new ArrayList<IntArrayList>();
+	private final Random rnd = new Random();
 
-    public TreeIndexBufferCacheWarmup(IBufferCache bufferCache, IFreePageManager freePageManager, int fileId) {
-        this.bufferCache = bufferCache;
-        this.freePageManager = freePageManager;
-        this.fileId = fileId;
-    }
+	public TreeIndexBufferCacheWarmup(IBufferCache bufferCache,
+			IFreePageManager freePageManager, int fileId) {
+		this.bufferCache = bufferCache;
+		this.freePageManager = freePageManager;
+		this.fileId = fileId;
+	}
 
-    public void warmup(ITreeIndexFrame frame, ITreeIndexMetaDataFrame metaFrame, int[] warmupTreeLevels,
-            int[] warmupRepeats) throws HyracksDataException {
-        bufferCache.openFile(fileId);
+	public void warmup(ITreeIndexFrame frame,
+			ITreeIndexMetaDataFrame metaFrame, int[] warmupTreeLevels,
+			int[] warmupRepeats) throws HyracksDataException {
+		bufferCache.openFile(fileId);
 
-        // scan entire file to determine pages in each level
-        int maxPageId = freePageManager.getMaxPage(metaFrame);
-        for (int pageId = 0; pageId <= maxPageId; pageId++) {
-            ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-            page.acquireReadLatch();
-            try {
-                frame.setPage(page);
-                byte level = frame.getLevel();
-                while (level >= pagesByLevel.size()) {
-                    pagesByLevel.add(new IntArrayList(100, 100));
-                }
-                if (level >= 0) {
-                    // System.out.println("ADDING: " + level + " " + pageId);
-                    pagesByLevel.get(level).add(pageId);
-                }
-            } finally {
-                page.releaseReadLatch();
-                bufferCache.unpin(page);
-            }
-        }
+		// scan entire file to determine pages in each level
+		int maxPageId = freePageManager.getMaxPage(metaFrame);
+		for (int pageId = 0; pageId <= maxPageId; pageId++) {
+			ICachedPage page = bufferCache.pin(
+					BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+			page.acquireReadLatch();
+			try {
+				frame.setPage(page);
+				byte level = frame.getLevel();
+				while (level >= pagesByLevel.size()) {
+					pagesByLevel.add(new IntArrayList(100, 100));
+				}
+				if (level >= 0) {
+					// System.out.println("ADDING: " + level + " " + pageId);
+					pagesByLevel.get(level).add(pageId);
+				}
+			} finally {
+				page.releaseReadLatch();
+				bufferCache.unpin(page);
+			}
+		}
 
-        // pin certain pages again to simulate frequent access
-        for (int i = 0; i < warmupTreeLevels.length; i++) {
-            if (warmupTreeLevels[i] < pagesByLevel.size()) {
-                int repeats = warmupRepeats[i];
-                IntArrayList pageIds = pagesByLevel.get(warmupTreeLevels[i]);
-                int[] remainingPageIds = new int[pageIds.size()];
-                for (int r = 0; r < repeats; r++) {
-                    for (int j = 0; j < pageIds.size(); j++) {
-                        remainingPageIds[j] = pageIds.get(j);
-                    }
+		// pin certain pages again to simulate frequent access
+		for (int i = 0; i < warmupTreeLevels.length; i++) {
+			if (warmupTreeLevels[i] < pagesByLevel.size()) {
+				int repeats = warmupRepeats[i];
+				IntArrayList pageIds = pagesByLevel.get(warmupTreeLevels[i]);
+				int[] remainingPageIds = new int[pageIds.size()];
+				for (int r = 0; r < repeats; r++) {
+					for (int j = 0; j < pageIds.size(); j++) {
+						remainingPageIds[j] = pageIds.get(j);
+					}
 
-                    int remainingLength = pageIds.size();
-                    for (int j = 0; j < pageIds.size(); j++) {
-                        int index = Math.abs(rnd.nextInt()) % remainingLength;
-                        int pageId = remainingPageIds[index];
+					int remainingLength = pageIds.size();
+					for (int j = 0; j < pageIds.size(); j++) {
+						int index = Math.abs(rnd.nextInt()) % remainingLength;
+						int pageId = remainingPageIds[index];
 
-                        // pin & latch then immediately unlatch & unpin
-                        ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-                        page.acquireReadLatch();
-                        page.releaseReadLatch();
-                        bufferCache.unpin(page);
+						// pin & latch then immediately unlatch & unpin
+						ICachedPage page = bufferCache.pin(BufferedFileHandle
+								.getDiskPageId(fileId, pageId), false);
+						page.acquireReadLatch();
+						page.releaseReadLatch();
+						bufferCache.unpin(page);
 
-                        remainingPageIds[index] = remainingPageIds[remainingLength - 1];
-                        remainingLength--;
-                    }
-                }
-            }
-        }
+						remainingPageIds[index] = remainingPageIds[remainingLength - 1];
+						remainingLength--;
+					}
+				}
+			}
+		}
 
-        bufferCache.closeFile(fileId);
-    }
+		bufferCache.closeFile(fileId);
+	}
 }
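
Note: the warmup loop above re-pins the pages of a level in random order without replacement by swapping each chosen slot with the last remaining one. The standalone sketch below shows just that sampling trick with made-up page ids; it deliberately uses rnd.nextInt(bound) instead of Math.abs(rnd.nextInt()) to sidestep the Integer.MIN_VALUE corner case, and the "touch page" print stands in for the pin/latch/unpin calls.

    import java.util.Random;

    public class ShuffleVisitSketch {
        public static void main(String[] args) {
            int[] pageIds = { 10, 11, 12, 13, 14 };
            Random rnd = new Random();
            int[] remaining = pageIds.clone();
            int remainingLength = remaining.length;
            for (int j = 0; j < pageIds.length; j++) {
                int index = rnd.nextInt(remainingLength);
                int pageId = remaining[index];
                System.out.println("touch page " + pageId); // pin & latch, then unlatch & unpin
                // swap-with-last so each page id is visited exactly once
                remaining[index] = remaining[remainingLength - 1];
                remainingLength--;
            }
        }
    }
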
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexStats.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexStats.java
index 5b01b2d..2754743 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexStats.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexStats.java
@@ -8,127 +8,140 @@
 
 public class TreeIndexStats {
 
-    private TreeIndexNodeTypeStats rootStats = new TreeIndexNodeTypeStats();
-    private TreeIndexNodeTypeStats interiorStats = new TreeIndexNodeTypeStats();
-    private TreeIndexNodeTypeStats leafStats = new TreeIndexNodeTypeStats();
+	private TreeIndexNodeTypeStats rootStats = new TreeIndexNodeTypeStats();
+	private TreeIndexNodeTypeStats interiorStats = new TreeIndexNodeTypeStats();
+	private TreeIndexNodeTypeStats leafStats = new TreeIndexNodeTypeStats();
 
-    private int freePages = 0;
-    private int metaPages = 0;
-    private int treeLevels = 0;
+	private int freePages = 0;
+	private int metaPages = 0;
+	private int treeLevels = 0;
 
-    public void begin() {
-        rootStats.clear();
-        interiorStats.clear();
-        leafStats.clear();
-        freePages = 0;
-        metaPages = 0;
-        treeLevels = 0;
-    }
+	public void begin() {
+		rootStats.clear();
+		interiorStats.clear();
+		leafStats.clear();
+		freePages = 0;
+		metaPages = 0;
+		treeLevels = 0;
+	}
 
-    public void addRoot(ITreeIndexFrame frame) {
-        treeLevels = frame.getLevel() + 1;
-        rootStats.add(frame);
-    }
+	public void addRoot(ITreeIndexFrame frame) {
+		treeLevels = frame.getLevel() + 1;
+		rootStats.add(frame);
+	}
 
-    public void add(ITreeIndexFrame frame) {
-        if (frame.isLeaf()) {
-            leafStats.add(frame);
-        } else if (frame.isInterior()) {
-            interiorStats.add(frame);
-        }
-    }
+	public void add(ITreeIndexFrame frame) {
+		if (frame.isLeaf()) {
+			leafStats.add(frame);
+		} else if (frame.isInterior()) {
+			interiorStats.add(frame);
+		}
+	}
 
-    public void add(ITreeIndexMetaDataFrame metaFrame, IFreePageManager freePageManager) {
-        if (freePageManager.isFreePage(metaFrame)) {
-            freePages++;
-        } else if (freePageManager.isMetaPage(metaFrame)) {
-            metaPages++;
-        }
-    }
+	public void add(ITreeIndexMetaDataFrame metaFrame,
+			IFreePageManager freePageManager) {
+		if (freePageManager.isFreePage(metaFrame)) {
+			freePages++;
+		} else if (freePageManager.isMetaPage(metaFrame)) {
+			metaPages++;
+		}
+	}
 
-    public void end() {
-        // nothing here currently
-    }
+	public void end() {
+		// nothing here currently
+	}
 
-    @Override
-    public String toString() {
-        StringBuilder strBuilder = new StringBuilder();
-        DecimalFormat df = new DecimalFormat("#####.##");
+	@Override
+	public String toString() {
+		StringBuilder strBuilder = new StringBuilder();
+		DecimalFormat df = new DecimalFormat("#####.##");
 
-        strBuilder.append("TREE LEVELS:  " + treeLevels + "\n");
-        strBuilder.append("FREE PAGES :  " + freePages + "\n");
-        strBuilder.append("META PAGES :  " + metaPages + "\n");
-        long totalPages = interiorStats.getNumPages() + leafStats.getNumPages() + freePages + metaPages;
-        strBuilder.append("TOTAL PAGES : " + totalPages + "\n");
+		strBuilder.append("TREE LEVELS:  " + treeLevels + "\n");
+		strBuilder.append("FREE PAGES :  " + freePages + "\n");
+		strBuilder.append("META PAGES :  " + metaPages + "\n");
+		long totalPages = interiorStats.getNumPages() + leafStats.getNumPages()
+				+ freePages + metaPages;
+		strBuilder.append("TOTAL PAGES : " + totalPages + "\n");
 
-        strBuilder.append("\n");
-        strBuilder.append("ROOT STATS" + "\n");
-        strBuilder.append("NUM TUPLES:      " + rootStats.getNumTuples() + "\n");
-        strBuilder.append("FILL FACTOR    : " + df.format(rootStats.getAvgFillFactor()) + "\n");
+		strBuilder.append("\n");
+		strBuilder.append("ROOT STATS" + "\n");
+		strBuilder
+				.append("NUM TUPLES:      " + rootStats.getNumTuples() + "\n");
+		strBuilder.append("FILL FACTOR    : "
+				+ df.format(rootStats.getAvgFillFactor()) + "\n");
 
-        if (interiorStats.getNumPages() > 0) {
-            strBuilder.append("\n");
-            strBuilder.append("INTERIOR STATS" + "\n");
-            strBuilder.append("NUM PAGES:       " + interiorStats.getNumPages() + "\n");
-            strBuilder.append("NUM TUPLES:      " + interiorStats.getNumTuples() + "\n");
-            strBuilder.append("AVG TUPLES/PAGE: " + df.format(interiorStats.getAvgNumTuples()) + "\n");
-            strBuilder.append("AVG FILL FACTOR: " + df.format(interiorStats.getAvgFillFactor()) + "\n");
-        }
+		if (interiorStats.getNumPages() > 0) {
+			strBuilder.append("\n");
+			strBuilder.append("INTERIOR STATS" + "\n");
+			strBuilder.append("NUM PAGES:       " + interiorStats.getNumPages()
+					+ "\n");
+			strBuilder.append("NUM TUPLES:      "
+					+ interiorStats.getNumTuples() + "\n");
+			strBuilder.append("AVG TUPLES/PAGE: "
+					+ df.format(interiorStats.getAvgNumTuples()) + "\n");
+			strBuilder.append("AVG FILL FACTOR: "
+					+ df.format(interiorStats.getAvgFillFactor()) + "\n");
+		}
 
-        if (leafStats.getNumPages() > 0) {
-            strBuilder.append("\n");
-            strBuilder.append("LEAF STATS" + "\n");
-            strBuilder.append("NUM PAGES:       " + df.format(leafStats.getNumPages()) + "\n");
-            strBuilder.append("NUM TUPLES:      " + df.format(leafStats.getNumTuples()) + "\n");
-            strBuilder.append("AVG TUPLES/PAGE: " + df.format(leafStats.getAvgNumTuples()) + "\n");
-            strBuilder.append("AVG FILL FACTOR: " + df.format(leafStats.getAvgFillFactor()) + "\n");
-        }
+		if (leafStats.getNumPages() > 0) {
+			strBuilder.append("\n");
+			strBuilder.append("LEAF STATS" + "\n");
+			strBuilder.append("NUM PAGES:       "
+					+ df.format(leafStats.getNumPages()) + "\n");
+			strBuilder.append("NUM TUPLES:      "
+					+ df.format(leafStats.getNumTuples()) + "\n");
+			strBuilder.append("AVG TUPLES/PAGE: "
+					+ df.format(leafStats.getAvgNumTuples()) + "\n");
+			strBuilder.append("AVG FILL FACTOR: "
+					+ df.format(leafStats.getAvgFillFactor()) + "\n");
+		}
 
-        return strBuilder.toString();
-    }
+		return strBuilder.toString();
+	}
 
-    public class TreeIndexNodeTypeStats {
-        private long numTuples;
-        private long sumTuplesSizes;
-        private long numPages;
-        private double sumFillFactors;
+	public class TreeIndexNodeTypeStats {
+		private long numTuples;
+		private long sumTuplesSizes;
+		private long numPages;
+		private double sumFillFactors;
 
-        public void clear() {
-            numTuples = 0;
-            sumTuplesSizes = 0;
-            numPages = 0;
-        }
+		public void clear() {
+			numTuples = 0;
+			sumTuplesSizes = 0;
+			numPages = 0;
+		}
 
-        public void add(ITreeIndexFrame frame) {
-            numPages++;
-            numTuples += frame.getTupleCount();
-            sumFillFactors += (double) (frame.getBuffer().capacity() - frame.getTotalFreeSpace())
-                    / (double) frame.getBuffer().capacity();
-        }
+		public void add(ITreeIndexFrame frame) {
+			numPages++;
+			numTuples += frame.getTupleCount();
+			sumFillFactors += (double) (frame.getBuffer().capacity() - frame
+					.getTotalFreeSpace())
+					/ (double) frame.getBuffer().capacity();
+		}
 
-        public long getNumTuples() {
-            return numTuples;
-        }
+		public long getNumTuples() {
+			return numTuples;
+		}
 
-        public long getSumTupleSizes() {
-            return sumTuplesSizes;
-        }
+		public long getSumTupleSizes() {
+			return sumTuplesSizes;
+		}
 
-        public long getNumPages() {
-            return numPages;
-        }
+		public long getNumPages() {
+			return numPages;
+		}
 
-        public double getAvgNumTuples() {
-            return (double) numTuples / (double) numPages;
-        }
+		public double getAvgNumTuples() {
+			return (double) numTuples / (double) numPages;
+		}
 
-        public double getAvgTupleSize() {
-            return (double) sumTuplesSizes / (double) numTuples;
-        }
+		public double getAvgTupleSize() {
+			return (double) sumTuplesSizes / (double) numTuples;
+		}
 
-        public double getAvgFillFactor() {
-            return sumFillFactors / numPages;
-        }
-    }
+		public double getAvgFillFactor() {
+			return sumFillFactors / numPages;
+		}
+	}
 
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexStatsGatherer.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexStatsGatherer.java
index fc0ab5e..9167732 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexStatsGatherer.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexStatsGatherer.java
@@ -10,61 +10,64 @@
 
 public class TreeIndexStatsGatherer {
 
-    private final TreeIndexStats treeIndexStats = new TreeIndexStats();
-    private final IBufferCache bufferCache;
-    private final IFreePageManager freePageManager;
-    private final int fileId;
-    private final int rootPage;
+	private final TreeIndexStats treeIndexStats = new TreeIndexStats();
+	private final IBufferCache bufferCache;
+	private final IFreePageManager freePageManager;
+	private final int fileId;
+	private final int rootPage;
 
-    public TreeIndexStatsGatherer(IBufferCache bufferCache, IFreePageManager freePageManager, int fileId, int rootPage) {
-        this.bufferCache = bufferCache;
-        this.freePageManager = freePageManager;
-        this.fileId = fileId;
-        this.rootPage = rootPage;
-    }
+	public TreeIndexStatsGatherer(IBufferCache bufferCache,
+			IFreePageManager freePageManager, int fileId, int rootPage) {
+		this.bufferCache = bufferCache;
+		this.freePageManager = freePageManager;
+		this.fileId = fileId;
+		this.rootPage = rootPage;
+	}
 
-    public TreeIndexStats gatherStats(ITreeIndexFrame leafFrame, ITreeIndexFrame interiorFrame,
-            ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
+	public TreeIndexStats gatherStats(ITreeIndexFrame leafFrame,
+			ITreeIndexFrame interiorFrame, ITreeIndexMetaDataFrame metaFrame)
+			throws HyracksDataException {
 
-        bufferCache.openFile(fileId);
+		bufferCache.openFile(fileId);
 
-        treeIndexStats.begin();
+		treeIndexStats.begin();
 
-        int maxPageId = freePageManager.getMaxPage(metaFrame);
-        for (int pageId = 0; pageId <= maxPageId; pageId++) {
-            ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-            page.acquireReadLatch();
-            try {
-                metaFrame.setPage(page);
-                leafFrame.setPage(page);
-                interiorFrame.setPage(page);
+		int maxPageId = freePageManager.getMaxPage(metaFrame);
+		for (int pageId = 0; pageId <= maxPageId; pageId++) {
+			ICachedPage page = bufferCache.pin(
+					BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+			page.acquireReadLatch();
+			try {
+				metaFrame.setPage(page);
+				leafFrame.setPage(page);
+				interiorFrame.setPage(page);
 
-                if (leafFrame.isLeaf()) {
-                    if (pageId == rootPage) {
-                        treeIndexStats.addRoot(leafFrame);
-                    } else {
-                        treeIndexStats.add(leafFrame);
-                    }
-                } else if (interiorFrame.isInterior()) {
-                    if (pageId == rootPage) {
-                        treeIndexStats.addRoot(interiorFrame);
-                    } else {
-                        treeIndexStats.add(interiorFrame);
-                    }
-                } else {
-                    treeIndexStats.add(metaFrame, freePageManager);
-                }
+				if (leafFrame.isLeaf()) {
+					if (pageId == rootPage) {
+						treeIndexStats.addRoot(leafFrame);
+					} else {
+						treeIndexStats.add(leafFrame);
+					}
+				} else if (interiorFrame.isInterior()) {
+					if (pageId == rootPage) {
+						treeIndexStats.addRoot(interiorFrame);
+					} else {
+						treeIndexStats.add(interiorFrame);
+					}
+				} else {
+					treeIndexStats.add(metaFrame, freePageManager);
+				}
 
-            } finally {
-                page.releaseReadLatch();
-                bufferCache.unpin(page);
-            }
-        }
+			} finally {
+				page.releaseReadLatch();
+				bufferCache.unpin(page);
+			}
+		}
 
-        treeIndexStats.end();
+		treeIndexStats.end();
 
-        bufferCache.closeFile(fileId);
+		bufferCache.closeFile(fileId);
 
-        return treeIndexStats;
-    }
+		return treeIndexStats;
+	}
 }
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/AbstractInvertedIndexOperatorDescriptor.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/AbstractInvertedIndexOperatorDescriptor.java
index 0d0c5bf..9db23ef 100644
--- a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/AbstractInvertedIndexOperatorDescriptor.java
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/AbstractInvertedIndexOperatorDescriptor.java
@@ -21,6 +21,7 @@
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
@@ -93,6 +94,11 @@
     public IBinaryComparatorFactory[] getTreeIndexComparatorFactories() {
         return btreeComparatorFactories;
     }
+
+    @Override
+    public IPrimitiveValueProviderFactory[] getTreeIndexValueProviderFactories() {
+        return null;
+    }
 
     @Override
     public ITypeTrait[] getTreeIndexTypeTraits() {
@@ -148,4 +154,4 @@
     public ITreeIndexOpHelperFactory getTreeIndexOpHelperFactory() {
         return opHelperFactory;
     }
-}
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeFrame.java
index ebc17b5..e3b9b96 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeFrame.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeFrame.java
@@ -25,24 +25,23 @@
 
 public interface IRTreeFrame extends ITreeIndexFrame {
 
-    public ITreeIndexTupleReference createTupleReference();
+	public ITreeIndexTupleReference createTupleReference();
 
-    public void generateDist(ITupleReference tuple, TupleEntryArrayList entries, Rectangle rec, int start, int end);
+	public void computeMBR(ISplitKey splitKey, MultiComparator cmp);
 
-    public void computeMBR(ISplitKey splitKey, MultiComparator cmp);
+	public void insert(ITupleReference tuple, MultiComparator cmp,
+			int tupleIndex) throws Exception;
 
-    public void insert(ITupleReference tuple, MultiComparator cmp, int tupleIndex) throws Exception;
+	public void delete(int tupleIndex, MultiComparator cmp) throws Exception;
 
-    public void delete(int tupleIndex, MultiComparator cmp) throws Exception;
+	public int getPageNsn();
 
-    public int getPageNsn();
+	public void setPageNsn(int pageNsn);
 
-    public void setPageNsn(int pageNsn);
+	public int getRightPage();
 
-    public int getRightPage();
+	public void setRightPage(int rightPage);
 
-    public void setRightPage(int rightPage);
+	public void adjustMBR(ITreeIndexTupleReference[] tuples, MultiComparator cmp);
 
-    public void adjustMBR(ITreeIndexTupleReference[] tuples, MultiComparator cmp);
-
-}
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java
index 824c6b0..4c251d2 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java
@@ -21,19 +21,23 @@
 
 public interface IRTreeInteriorFrame extends IRTreeFrame {
 
-    public boolean findBestChild(ITupleReference tuple, MultiComparator cmp);
+	public boolean findBestChild(ITupleReference tuple, MultiComparator cmp);
 
-    public int getBestChildPageId(MultiComparator cmp);
+	public int getBestChildPageId(MultiComparator cmp);
 
-    public int getChildPageIdIfIntersect(ITupleReference tuple, int tupleIndex, MultiComparator cmp);
+	public int getChildPageIdIfIntersect(ITupleReference tuple, int tupleIndex,
+			MultiComparator cmp);
 
-    public int findTupleByPointer(ITupleReference tuple, MultiComparator cmp);
+	public int findTupleByPointer(ITupleReference tuple, MultiComparator cmp);
 
-    public int findTupleByPointer(ITupleReference tuple, PathList traverseList, int parentId, MultiComparator cmp);
+	public int findTupleByPointer(ITupleReference tuple, PathList traverseList,
+			int parentId, MultiComparator cmp);
 
-    public void adjustKey(ITupleReference tuple, int tupleIndex, MultiComparator cmp);
+	public void adjustKey(ITupleReference tuple, int tupleIndex,
+			MultiComparator cmp);
 
-    public boolean recomputeMBR(ITupleReference tuple, int tupleIndex, MultiComparator cmp);
+	public boolean recomputeMBR(ITupleReference tuple, int tupleIndex,
+			MultiComparator cmp);
 
-    public void enlarge(ITupleReference tuple, MultiComparator cmp);
+	public void enlarge(ITupleReference tuple, MultiComparator cmp);
 }
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeLeafFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeLeafFrame.java
index c85712d..3005785 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeLeafFrame.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeLeafFrame.java
@@ -20,7 +20,8 @@
 
 public interface IRTreeLeafFrame extends IRTreeFrame {
 
-    public int findTupleIndex(ITupleReference tuple, MultiComparator cmp);
+	public int findTupleIndex(ITupleReference tuple, MultiComparator cmp);
 
-    public boolean intersect(ITupleReference tuple, int tupleIndex, MultiComparator cmp);
+	public boolean intersect(ITupleReference tuple, int tupleIndex,
+			MultiComparator cmp);
 }
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeOpHelper.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeOpHelper.java
index 2fcb065..cb7c9d9 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeOpHelper.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeOpHelper.java
@@ -16,8 +16,10 @@
 package edu.uci.ics.hyracks.storage.am.rtree.dataflow;
 
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOperatorDescriptorHelper;
@@ -31,20 +33,32 @@
 
 public class RTreeOpHelper extends TreeIndexOpHelper {
 
-    protected MultiComparator interiorCmp;
+	public RTreeOpHelper(ITreeIndexOperatorDescriptorHelper opDesc,
+			IHyracksTaskContext ctx, int partition, IndexHelperOpenMode mode) {
+		super(opDesc, ctx, partition, mode);
+	}
 
-    public RTreeOpHelper(ITreeIndexOperatorDescriptorHelper opDesc, IHyracksTaskContext ctx, int partition,
-            IndexHelperOpenMode mode) {
-        super(opDesc, ctx, partition, mode);
-    }
+	public ITreeIndex createTreeIndex() throws HyracksDataException {
+		IBufferCache bufferCache = opDesc.getStorageManager().getBufferCache(
+				ctx);
+		ITreeIndexMetaDataFrameFactory metaDataFrameFactory = new LIFOMetaDataFrameFactory();
+		IFreePageManager freePageManager = new LinkedListFreePageManager(
+				bufferCache, indexFileId, 0, metaDataFrameFactory);
 
-    public ITreeIndex createTreeIndex() throws HyracksDataException {
-        IBufferCache bufferCache = opDesc.getStorageManager().getBufferCache(ctx);
-        ITreeIndexMetaDataFrameFactory metaDataFrameFactory = new LIFOMetaDataFrameFactory();
-        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, indexFileId, 0,
-                metaDataFrameFactory);
+		return new RTree(bufferCache, freePageManager,
+				opDesc.getTreeIndexInteriorFactory(),
+				opDesc.getTreeIndexLeafFactory(), cmp);
+	}
 
-        return new RTree(bufferCache, freePageManager, opDesc.getTreeIndexInteriorFactory(),
-                opDesc.getTreeIndexLeafFactory(), cmp);
-    }
-}
+	public MultiComparator createMultiComparator(IBinaryComparator[] comparators)
+			throws HyracksDataException {
+		IPrimitiveValueProvider[] keyValueProvider = new IPrimitiveValueProvider[opDesc
+				.getTreeIndexValueProviderFactories().length];
+		for (int i = 0; i < keyValueProvider.length; i++) {
+			keyValueProvider[i] = opDesc.getTreeIndexValueProviderFactories()[i]
+					.createPrimitiveValueProvider();
+		}
+		return new MultiComparator(opDesc.getTreeIndexTypeTraits(),
+				comparators, keyValueProvider);
+	}
+}
\ No newline at end of file
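
Editor's note: for reference, a minimal sketch of the wiring createMultiComparator performs, pulled out as a hypothetical standalone helper. The class name RTreeCmpHelper, the method name, and the ITypeTrait import path are illustrative assumptions, not part of this patch; the three-argument MultiComparator and the factory API are taken from the changes above.

import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;

// Hypothetical helper, not part of the patch: builds the value-provider-aware MultiComparator.
public class RTreeCmpHelper {
    public static MultiComparator createCmp(ITypeTrait[] typeTraits, IBinaryComparator[] comparators,
            IPrimitiveValueProviderFactory[] valueProviderFactories) {
        // One value provider per key field, materialized from the factories supplied by the operator descriptor.
        IPrimitiveValueProvider[] valueProviders = new IPrimitiveValueProvider[valueProviderFactories.length];
        for (int i = 0; i < valueProviders.length; i++) {
            valueProviders[i] = valueProviderFactories[i].createPrimitiveValueProvider();
        }
        return new MultiComparator(typeTraits, comparators, valueProviders);
    }
}
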
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeOpHelperFactory.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeOpHelperFactory.java
index 75668e1..6afc37e 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeOpHelperFactory.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeOpHelperFactory.java
@@ -23,11 +23,12 @@
 
 public class RTreeOpHelperFactory implements ITreeIndexOpHelperFactory {
 
-    private static final long serialVersionUID = 1L;
+	private static final long serialVersionUID = 1L;
 
-    @Override
-    public TreeIndexOpHelper createTreeIndexOpHelper(ITreeIndexOperatorDescriptorHelper opDesc,
-            IHyracksTaskContext ctx, int partition, IndexHelperOpenMode mode) {
-        return new RTreeOpHelper(opDesc, ctx, partition, mode);
-    }
+	@Override
+	public TreeIndexOpHelper createTreeIndexOpHelper(
+			ITreeIndexOperatorDescriptorHelper opDesc, IHyracksTaskContext ctx,
+			int partition, IndexHelperOpenMode mode) {
+		return new RTreeOpHelper(opDesc, ctx, partition, mode);
+	}
 }
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorDescriptor.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorDescriptor.java
index 3dc0963..f5741c0 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorDescriptor.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorDescriptor.java
@@ -24,6 +24,7 @@
 import edu.uci.ics.hyracks.api.job.IOperatorEnvironment;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
@@ -31,25 +32,35 @@
 import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOpHelperFactory;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
 
-public class RTreeSearchOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
+public class RTreeSearchOperatorDescriptor extends
+		AbstractTreeIndexOperatorDescriptor {
 
-    private static final long serialVersionUID = 1L;
+	private static final long serialVersionUID = 1L;
 
-    private int[] keyFields; // fields in input tuple to be used as keys
+	private int[] keyFields; // fields in input tuple to be used as keys
 
-    public RTreeSearchOperatorDescriptor(JobSpecification spec, RecordDescriptor recDesc,
-            IStorageManagerInterface storageManager, IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider,
-            IFileSplitProvider fileSplitProvider, ITreeIndexFrameFactory interiorFrameFactory,
-            ITreeIndexFrameFactory leafFrameFactory, ITypeTrait[] typeTraits,
-            IBinaryComparatorFactory[] comparatorFactories, int[] keyFields, ITreeIndexOpHelperFactory opHelperFactory) {
-        super(spec, 1, 1, recDesc, storageManager, treeIndexRegistryProvider, fileSplitProvider, interiorFrameFactory,
-                leafFrameFactory, typeTraits, comparatorFactories, opHelperFactory);
-        this.keyFields = keyFields;
-    }
+	public RTreeSearchOperatorDescriptor(JobSpecification spec,
+			RecordDescriptor recDesc, IStorageManagerInterface storageManager,
+			IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider,
+			IFileSplitProvider fileSplitProvider,
+			ITreeIndexFrameFactory interiorFrameFactory,
+			ITreeIndexFrameFactory leafFrameFactory, ITypeTrait[] typeTraits,
+			IBinaryComparatorFactory[] comparatorFactories,
+			IPrimitiveValueProviderFactory[] valueProviderFactories,
+			int[] keyFields, ITreeIndexOpHelperFactory opHelperFactory) {
+		super(spec, 1, 1, recDesc, storageManager, treeIndexRegistryProvider,
+				fileSplitProvider, interiorFrameFactory, leafFrameFactory,
+				typeTraits, comparatorFactories, valueProviderFactories,
+				opHelperFactory);
+		this.keyFields = keyFields;
+	}
 
-    @Override
-    public IOperatorNodePushable createPushRuntime(final IHyracksTaskContext ctx, final IOperatorEnvironment env,
-            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
-        return new RTreeSearchOperatorNodePushable(this, ctx, partition, recordDescProvider, keyFields);
-    }
-}
+	@Override
+	public IOperatorNodePushable createPushRuntime(
+			final IHyracksTaskContext ctx, final IOperatorEnvironment env,
+			IRecordDescriptorProvider recordDescProvider, int partition,
+			int nPartitions) {
+		return new RTreeSearchOperatorNodePushable(this, ctx, partition,
+				recordDescProvider, keyFields);
+	}
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorNodePushable.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorNodePushable.java
index 69e48c0..c9a4d6b 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorNodePushable.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorNodePushable.java
@@ -29,6 +29,7 @@
 import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
 import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
@@ -44,138 +45,157 @@
 import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeSearchCursor;
 import edu.uci.ics.hyracks.storage.am.rtree.impls.SearchPredicate;
 
-public class RTreeSearchOperatorNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
-    private TreeIndexOpHelper treeIndexOpHelper;
-    private FrameTupleAccessor accessor;
+public class RTreeSearchOperatorNodePushable extends
+		AbstractUnaryInputUnaryOutputOperatorNodePushable {
+	private TreeIndexOpHelper treeIndexOpHelper;
+	private FrameTupleAccessor accessor;
 
-    private ByteBuffer writeBuffer;
-    private FrameTupleAppender appender;
-    private ArrayTupleBuilder tb;
-    private DataOutput dos;
+	private ByteBuffer writeBuffer;
+	private FrameTupleAppender appender;
+	private ArrayTupleBuilder tb;
+	private DataOutput dos;
 
-    private RTree rtree;
-    private PermutingFrameTupleReference searchKey;
-    private SearchPredicate searchPred;
-    private MultiComparator cmp;
-    private ITreeIndexCursor cursor;
-    private ITreeIndexFrame interiorFrame;
-    private ITreeIndexFrame leafFrame;
-    private RTreeOpContext opCtx;
+	private RTree rtree;
+	private PermutingFrameTupleReference searchKey;
+	private SearchPredicate searchPred;
+	private MultiComparator cmp;
+	private ITreeIndexCursor cursor;
+	private ITreeIndexFrame interiorFrame;
+	private ITreeIndexFrame leafFrame;
+	private RTreeOpContext opCtx;
 
-    private RecordDescriptor recDesc;
+	private RecordDescriptor recDesc;
 
-    public RTreeSearchOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc, IHyracksTaskContext ctx,
-            int partition, IRecordDescriptorProvider recordDescProvider, int[] keyFields) {
-        treeIndexOpHelper = opDesc.getTreeIndexOpHelperFactory().createTreeIndexOpHelper(opDesc, ctx, partition,
-                IndexHelperOpenMode.OPEN);
+	public RTreeSearchOperatorNodePushable(
+			AbstractTreeIndexOperatorDescriptor opDesc,
+			IHyracksTaskContext ctx, int partition,
+			IRecordDescriptorProvider recordDescProvider, int[] keyFields) {
+		treeIndexOpHelper = opDesc.getTreeIndexOpHelperFactory()
+				.createTreeIndexOpHelper(opDesc, ctx, partition,
+						IndexHelperOpenMode.OPEN);
 
-        this.recDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getOperatorId(), 0);
-        if (keyFields != null && keyFields.length > 0) {
-            searchKey = new PermutingFrameTupleReference();
-            searchKey.setFieldPermutation(keyFields);
-        }
-    }
+		this.recDesc = recordDescProvider.getInputRecordDescriptor(
+				opDesc.getOperatorId(), 0);
+		if (keyFields != null && keyFields.length > 0) {
+			searchKey = new PermutingFrameTupleReference();
+			searchKey.setFieldPermutation(keyFields);
+		}
+	}
 
-    @Override
-    public void open() throws HyracksDataException {
-        AbstractTreeIndexOperatorDescriptor opDesc = (AbstractTreeIndexOperatorDescriptor) treeIndexOpHelper
-                .getOperatorDescriptor();
-        accessor = new FrameTupleAccessor(treeIndexOpHelper.getHyracksTaskContext().getFrameSize(), recDesc);
+	@Override
+	public void open() throws HyracksDataException {
+		AbstractTreeIndexOperatorDescriptor opDesc = (AbstractTreeIndexOperatorDescriptor) treeIndexOpHelper
+				.getOperatorDescriptor();
+		accessor = new FrameTupleAccessor(treeIndexOpHelper
+				.getHyracksTaskContext().getFrameSize(), recDesc);
 
-        interiorFrame = opDesc.getTreeIndexInteriorFactory().createFrame();
-        leafFrame = opDesc.getTreeIndexLeafFactory().createFrame();
-        cursor = new RTreeSearchCursor((IRTreeInteriorFrame) interiorFrame, (IRTreeLeafFrame) leafFrame);
+		interiorFrame = opDesc.getTreeIndexInteriorFactory().createFrame();
+		leafFrame = opDesc.getTreeIndexLeafFactory().createFrame();
+		cursor = new RTreeSearchCursor((IRTreeInteriorFrame) interiorFrame,
+				(IRTreeLeafFrame) leafFrame);
 
-        writer.open();
+		writer.open();
+		try {
 
-        try {
+			treeIndexOpHelper.init();
+			rtree = (RTree) treeIndexOpHelper.getTreeIndex();
 
-            treeIndexOpHelper.init();
-            rtree = (RTree) treeIndexOpHelper.getTreeIndex();
+			int keySearchFields = rtree.getCmp().getComparators().length;
 
-            int keySearchFields = rtree.getCmp().getComparators().length;
+			IBinaryComparator[] keySearchComparators = new IBinaryComparator[keySearchFields];
+			for (int i = 0; i < keySearchFields; i++) {
+				keySearchComparators[i] = rtree.getCmp().getComparators()[i];
+			}
 
-            IBinaryComparator[] keySearchComparators = new IBinaryComparator[keySearchFields];
-            for (int i = 0; i < keySearchFields; i++) {
-                keySearchComparators[i] = rtree.getCmp().getComparators()[i];
-            }
-            cmp = new MultiComparator(rtree.getCmp().getTypeTraits(), keySearchComparators);
-            searchPred = new SearchPredicate(searchKey, cmp);
+			IPrimitiveValueProvider[] keyValueProvider = new IPrimitiveValueProvider[keySearchFields];
+			for (int i = 0; i < keySearchFields; i++) {
+				keyValueProvider[i] = rtree.getCmp().getValueProviders()[i];
+			}
 
-            writeBuffer = treeIndexOpHelper.getHyracksTaskContext().allocateFrame();
-            tb = new ArrayTupleBuilder(rtree.getCmp().getFieldCount());
-            dos = tb.getDataOutput();
-            appender = new FrameTupleAppender(treeIndexOpHelper.getHyracksTaskContext().getFrameSize());
-            appender.reset(writeBuffer, true);
+			cmp = new MultiComparator(rtree.getCmp().getTypeTraits(),
+					keySearchComparators, keyValueProvider);
 
-            opCtx = rtree.createOpContext(IndexOp.SEARCH, treeIndexOpHelper.getLeafFrame(),
-                    treeIndexOpHelper.getInteriorFrame(), null);
+			searchPred = new SearchPredicate(searchKey, cmp);
 
-        } catch (Exception e) {
-            treeIndexOpHelper.deinit();
-            throw new HyracksDataException(e);
-        }
-    }
+			writeBuffer = treeIndexOpHelper.getHyracksTaskContext()
+					.allocateFrame();
+			tb = new ArrayTupleBuilder(rtree.getCmp().getFieldCount());
+			dos = tb.getDataOutput();
+			appender = new FrameTupleAppender(treeIndexOpHelper
+					.getHyracksTaskContext().getFrameSize());
+			appender.reset(writeBuffer, true);
 
-    private void writeSearchResults() throws Exception {
-        while (cursor.hasNext()) {
-            tb.reset();
-            cursor.next();
+			opCtx = rtree.createOpContext(IndexOp.SEARCH,
+					treeIndexOpHelper.getLeafFrame(),
+					treeIndexOpHelper.getInteriorFrame(), null);
 
-            ITupleReference frameTuple = cursor.getTuple();
-            for (int i = 0; i < frameTuple.getFieldCount(); i++) {
-                dos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
-                tb.addFieldEndOffset();
-            }
+		} catch (Exception e) {
+			treeIndexOpHelper.deinit();
+			throw new HyracksDataException(e);
+		}
+	}
 
-            if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
-                FrameUtils.flushFrame(writeBuffer, writer);
-                appender.reset(writeBuffer, true);
-                if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
-                    throw new IllegalStateException();
-                }
-            }
-        }
-    }
+	private void writeSearchResults() throws Exception {
+		while (cursor.hasNext()) {
+			tb.reset();
+			cursor.next();
 
-    @Override
-    public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
-        accessor.reset(buffer);
+			ITupleReference frameTuple = cursor.getTuple();
+			for (int i = 0; i < frameTuple.getFieldCount(); i++) {
+				dos.write(frameTuple.getFieldData(i),
+						frameTuple.getFieldStart(i),
+						frameTuple.getFieldLength(i));
+				tb.addFieldEndOffset();
+			}
 
-        int tupleCount = accessor.getTupleCount();
-        try {
-            for (int i = 0; i < tupleCount; i++) {
-                searchKey.reset(accessor, i);
+			if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0,
+					tb.getSize())) {
+				FrameUtils.flushFrame(writeBuffer, writer);
+				appender.reset(writeBuffer, true);
+				if (!appender.append(tb.getFieldEndOffsets(),
+						tb.getByteArray(), 0, tb.getSize())) {
+					throw new IllegalStateException();
+				}
+			}
+		}
+	}
 
-                searchPred.setSearchKey(searchKey);
-                cursor.reset();
-                rtree.search(cursor, searchPred, opCtx);
-                writeSearchResults();
-            }
-        } catch (Exception e) {
-            throw new HyracksDataException(e);
-        }
-    }
+	@Override
+	public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+		accessor.reset(buffer);
 
-    @Override
-    public void close() throws HyracksDataException {
-        try {
-            if (appender.getTupleCount() > 0) {
-                FrameUtils.flushFrame(writeBuffer, writer);
-            }
-            writer.close();
-            try {
-                cursor.close();
-            } catch (Exception e) {
-                throw new HyracksDataException(e);
-            }
-        } finally {
-            treeIndexOpHelper.deinit();
-        }
-    }
+		int tupleCount = accessor.getTupleCount();
+		try {
+			for (int i = 0; i < tupleCount; i++) {
+				searchKey.reset(accessor, i);
 
-    @Override
-    public void fail() throws HyracksDataException {
-        writer.fail();
-    }
+				searchPred.setSearchKey(searchKey);
+				cursor.reset();
+				rtree.search(cursor, searchPred, opCtx);
+				writeSearchResults();
+			}
+		} catch (Exception e) {
+			throw new HyracksDataException(e);
+		}
+	}
+
+	@Override
+	public void close() throws HyracksDataException {
+		try {
+			if (appender.getTupleCount() > 0) {
+				FrameUtils.flushFrame(writeBuffer, writer);
+			}
+			writer.close();
+			try {
+				cursor.close();
+			} catch (Exception e) {
+				throw new HyracksDataException(e);
+			}
+		} finally {
+			treeIndexOpHelper.deinit();
+		}
+	}
+
+	@Override
+	public void fail() throws HyracksDataException {
+		writer.fail();
+	}
 }
\ No newline at end of file
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java
index 63870df..e809884 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java
@@ -17,21 +17,14 @@
 
 import java.util.ArrayList;
 
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.FloatSerializerDeserializer;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
 import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
 import edu.uci.ics.hyracks.storage.am.common.frames.TreeIndexNSMFrame;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IGenericPrimitiveSerializerDeserializer;
 import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeFrame;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.DoubleGenericPrimitiveSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.FloatGenericPrimitiveSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.IntegerGenericPrimitiveSerializerDeserializer;
 import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeSplitKey;
 import edu.uci.ics.hyracks.storage.am.rtree.impls.Rectangle;
 import edu.uci.ics.hyracks.storage.am.rtree.impls.TupleEntryArrayList;
@@ -50,35 +43,17 @@
 	protected TupleEntryArrayList tupleEntries2; // used for split
 
 	protected Rectangle[] rec;
-	protected IGenericPrimitiveSerializerDeserializer[] recDescSers;
 
 	protected static final double splitFactor = 0.4;
 	protected static final int nearMinimumOverlapFactor = 32;
 	private static final double doubleEpsilon = computeDoubleEpsilon();
 	private static final int numTuplesEntries = 100;
 
-	public RTreeNSMFrame(ITreeIndexTupleWriter tupleWriter,
-			ISerializerDeserializer[] recDescSers, int keyFieldCount) {
+	public RTreeNSMFrame(ITreeIndexTupleWriter tupleWriter, int keyFieldCount) {
 		super(tupleWriter, new UnorderedSlotManager());
 		this.tuples = new ITreeIndexTupleReference[keyFieldCount];
-		this.recDescSers = new IGenericPrimitiveSerializerDeserializer[keyFieldCount];
 		for (int i = 0; i < keyFieldCount; i++) {
 			this.tuples[i] = tupleWriter.createTupleReference();
-
-			if (recDescSers[i].getClass().equals(
-					IntegerSerializerDeserializer.INSTANCE.getClass())) {
-				this.recDescSers[i] = IntegerGenericPrimitiveSerializerDeserializer.INSTANCE;
-			} else if (recDescSers[i].getClass().equals(
-					FloatSerializerDeserializer.INSTANCE.getClass())) {
-				this.recDescSers[i] = FloatGenericPrimitiveSerializerDeserializer.INSTANCE;
-			} else if (recDescSers[i].getClass().equals(
-					DoubleSerializerDeserializer.INSTANCE.getClass())) {
-				this.recDescSers[i] = DoubleGenericPrimitiveSerializerDeserializer.INSTANCE;
-			} else {
-				throw new RuntimeException(
-						"Key type not supported. The supported key types are int, float, and double ");
-
-			}
 		}
 		cmpFrameTuple = tupleWriter.createTupleReference();
 
@@ -88,7 +63,7 @@
 				numTuplesEntries);
 		rec = new Rectangle[4];
 		for (int i = 0; i < 4; i++) {
-			rec[i] = new Rectangle(keyFieldCount / 2, this.recDescSers);
+			rec[i] = new Rectangle(keyFieldCount / 2);
 		}
 	}
 
@@ -166,21 +141,22 @@
 	}
 
 	public void generateDist(ITupleReference tuple,
-			TupleEntryArrayList entries, Rectangle rec, int start, int end) {
+			TupleEntryArrayList entries, Rectangle rec, int start, int end,
+			MultiComparator cmp) {
 		int j = 0;
 		while (entries.get(j).getTupleIndex() == -1) {
 			j++;
 		}
 		frameTuple.resetByTupleIndex(this, entries.get(j).getTupleIndex());
-		rec.set(frameTuple);
+		rec.set(frameTuple, cmp);
 		for (int i = start; i < end; ++i) {
 			if (i != j) {
 				if (entries.get(i).getTupleIndex() != -1) {
 					frameTuple.resetByTupleIndex(this, entries.get(i)
 							.getTupleIndex());
-					rec.enlarge(frameTuple);
+					rec.enlarge(frameTuple, cmp);
 				} else {
-					rec.enlarge(tuple);
+					rec.enlarge(tuple, cmp);
 				}
 			}
 		}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java
index d7ddf66..38d2a46 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java
@@ -46,8 +46,8 @@
 	private static final int childPtrSize = 4;
 
 	public RTreeNSMInteriorFrame(ITreeIndexTupleWriter tupleWriter,
-			ISerializerDeserializer[] recDescSers, int keyFieldCount) {
-		super(tupleWriter, recDescSers, keyFieldCount);
+			int keyFieldCount) {
+		super(tupleWriter, keyFieldCount);
 	}
 
 	@Override
@@ -219,7 +219,6 @@
 				return -1;
 			}
 		}
-		// return buf.getInt(frameTuple.getFieldStart(cmp.getKeyFieldCount()));
 		return buf.getInt(getChildPointerOff(frameTuple, cmp));
 	}
 
@@ -347,21 +346,20 @@
 			for (int k = 0; k < getTupleCount(); ++k) {
 
 				frameTuple.resetByTupleIndex(this, k);
-
-				double LowerKey = recDescSers[i]
+				double LowerKey = cmp.getValueProviders()[i]
 						.getValue(frameTuple.getFieldData(i),
 								frameTuple.getFieldStart(i));
-				double UpperKey = recDescSers[j]
+				double UpperKey = cmp.getValueProviders()[j]
 						.getValue(frameTuple.getFieldData(j),
 								frameTuple.getFieldStart(j));
 
 				tupleEntries1.add(k, LowerKey);
 				tupleEntries2.add(k, UpperKey);
 			}
-			double LowerKey = recDescSers[i].getValue(tuple.getFieldData(i),
-					tuple.getFieldStart(i));
-			double UpperKey = recDescSers[j].getValue(tuple.getFieldData(j),
-					tuple.getFieldStart(j));
+			double LowerKey = cmp.getValueProviders()[i].getValue(
+					tuple.getFieldData(i), tuple.getFieldStart(i));
+			double UpperKey = cmp.getValueProviders()[j].getValue(
+					tuple.getFieldData(j), tuple.getFieldStart(j));
 
 			tupleEntries1.add(-1, LowerKey);
 			tupleEntries2.add(-1, UpperKey);
@@ -374,12 +372,12 @@
 			for (int k = 1; k <= splitDistribution; ++k) {
 				int d = m - 1 + k;
 
-				generateDist(tuple, tupleEntries1, rec[0], 0, d);
-				generateDist(tuple, tupleEntries2, rec[1], 0, d);
+				generateDist(tuple, tupleEntries1, rec[0], 0, d, cmp);
+				generateDist(tuple, tupleEntries2, rec[1], 0, d, cmp);
 				generateDist(tuple, tupleEntries1, rec[2], d,
-						getTupleCount() + 1);
+						getTupleCount() + 1, cmp);
 				generateDist(tuple, tupleEntries2, rec[3], d,
-						getTupleCount() + 1);
+						getTupleCount() + 1, cmp);
 
 				// calculate the margin of the distributions
 				lowerMargin += rec[0].margin() + rec[2].margin();
@@ -400,12 +398,12 @@
 
 		for (int i = 0; i < getTupleCount(); ++i) {
 			frameTuple.resetByTupleIndex(this, i);
-			double key = recDescSers[splitAxis + sortOrder].getValue(
-					frameTuple.getFieldData(splitAxis + sortOrder),
-					frameTuple.getFieldStart(splitAxis + sortOrder));
+			double key = cmp.getValueProviders()[splitAxis + sortOrder]
+					.getValue(frameTuple.getFieldData(splitAxis + sortOrder),
+							frameTuple.getFieldStart(splitAxis + sortOrder));
 			tupleEntries1.add(i, key);
 		}
-		double key = recDescSers[splitAxis + sortOrder].getValue(
+		double key = cmp.getValueProviders()[splitAxis + sortOrder].getValue(
 				tuple.getFieldData(splitAxis + sortOrder),
 				tuple.getFieldStart(splitAxis + sortOrder));
 		tupleEntries1.add(-1, key);
@@ -417,8 +415,9 @@
 		for (int i = 1; i <= splitDistribution; ++i) {
 			int d = m - 1 + i;
 
-			generateDist(tuple, tupleEntries1, rec[0], 0, d);
-			generateDist(tuple, tupleEntries1, rec[2], d, getTupleCount() + 1);
+			generateDist(tuple, tupleEntries1, rec[0], 0, d, cmp);
+			generateDist(tuple, tupleEntries1, rec[2], d, getTupleCount() + 1,
+					cmp);
 
 			double overlap = rec[0].overlappedArea(rec[2]);
 			if (overlap < minOverlap) {
@@ -589,10 +588,10 @@
 						tupleToBeInserted.getFieldStart(i),
 						tupleToBeInserted.getFieldLength(i));
 				if (c < 0) {
-					pLow1 = recDescSers[i].getValue(tuple1.getFieldData(i),
-							tuple1.getFieldStart(i));
+					pLow1 = cmp.getValueProviders()[i].getValue(
+							tuple1.getFieldData(i), tuple1.getFieldStart(i));
 				} else {
-					pLow1 = recDescSers[i].getValue(
+					pLow1 = cmp.getValueProviders()[i].getValue(
 							tupleToBeInserted.getFieldData(i),
 							tupleToBeInserted.getFieldStart(i));
 				}
@@ -603,24 +602,24 @@
 						tupleToBeInserted.getFieldStart(j),
 						tupleToBeInserted.getFieldLength(j));
 				if (c > 0) {
-					pHigh1 = recDescSers[j].getValue(tuple1.getFieldData(j),
-							tuple1.getFieldStart(j));
+					pHigh1 = cmp.getValueProviders()[j].getValue(
+							tuple1.getFieldData(j), tuple1.getFieldStart(j));
 				} else {
-					pHigh1 = recDescSers[j].getValue(
+					pHigh1 = cmp.getValueProviders()[j].getValue(
 							tupleToBeInserted.getFieldData(j),
 							tupleToBeInserted.getFieldStart(j));
 				}
 			} else {
-				pLow1 = recDescSers[i].getValue(tuple1.getFieldData(i),
-						tuple1.getFieldStart(i));
-				pHigh1 = recDescSers[j].getValue(tuple1.getFieldData(j),
-						tuple1.getFieldStart(j));
+				pLow1 = cmp.getValueProviders()[i].getValue(
+						tuple1.getFieldData(i), tuple1.getFieldStart(i));
+				pHigh1 = cmp.getValueProviders()[j].getValue(
+						tuple1.getFieldData(j), tuple1.getFieldStart(j));
 			}
 
-			double pLow2 = recDescSers[i].getValue(tuple2.getFieldData(i),
-					tuple2.getFieldStart(i));
-			double pHigh2 = recDescSers[j].getValue(tuple2.getFieldData(j),
-					tuple2.getFieldStart(j));
+			double pLow2 = cmp.getValueProviders()[i].getValue(
+					tuple2.getFieldData(i), tuple2.getFieldStart(i));
+			double pHigh2 = cmp.getValueProviders()[j].getValue(
+					tuple2.getFieldData(j), tuple2.getFieldStart(j));
 
 			if (pLow1 > pHigh2 || pHigh1 < pLow2) {
 				return 0.0;
@@ -648,10 +647,10 @@
 					tupleToBeInserted.getFieldStart(i),
 					tupleToBeInserted.getFieldLength(i));
 			if (c < 0) {
-				pLow = recDescSers[i].getValue(tuple.getFieldData(i),
-						tuple.getFieldStart(i));
+				pLow = cmp.getValueProviders()[i].getValue(
+						tuple.getFieldData(i), tuple.getFieldStart(i));
 			} else {
-				pLow = recDescSers[i].getValue(
+				pLow = cmp.getValueProviders()[i].getValue(
 						tupleToBeInserted.getFieldData(i),
 						tupleToBeInserted.getFieldStart(i));
 			}
@@ -662,10 +661,10 @@
 					tupleToBeInserted.getFieldStart(j),
 					tupleToBeInserted.getFieldLength(j));
 			if (c > 0) {
-				pHigh = recDescSers[j].getValue(tuple.getFieldData(j),
-						tuple.getFieldStart(j));
+				pHigh = cmp.getValueProviders()[j].getValue(
+						tuple.getFieldData(j), tuple.getFieldStart(j));
 			} else {
-				pHigh = recDescSers[j].getValue(
+				pHigh = cmp.getValueProviders()[j].getValue(
 						tupleToBeInserted.getFieldData(j),
 						tupleToBeInserted.getFieldStart(j));
 			}
@@ -679,10 +678,10 @@
 		int maxFieldPos = cmp.getKeyFieldCount() / 2;
 		for (int i = 0; i < maxFieldPos; i++) {
 			int j = maxFieldPos + i;
-			area *= recDescSers[j].getValue(tuple.getFieldData(j),
+			area *= cmp.getValueProviders()[j].getValue(tuple.getFieldData(j),
 					tuple.getFieldStart(j))
-					- recDescSers[i].getValue(tuple.getFieldData(i),
-							tuple.getFieldStart(i));
+					- cmp.getValueProviders()[i].getValue(
+							tuple.getFieldData(i), tuple.getFieldStart(i));
 		}
 		return area;
 	}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java
index 004c519..100b4d8 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java
@@ -15,7 +15,6 @@
 
 package edu.uci.ics.hyracks.storage.am.rtree.frames;
 
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
@@ -24,21 +23,21 @@
 
 	private static final long serialVersionUID = 1L;
 	private ITreeIndexTupleWriterFactory tupleWriterFactory;
-	private ISerializerDeserializer[] recDescSers;
 	private int keyFieldCount;
 
 	public RTreeNSMInteriorFrameFactory(
-			ITreeIndexTupleWriterFactory tupleWriterFactory,
-			ISerializerDeserializer[] recDescSers, int keyFieldCount) {
+			ITreeIndexTupleWriterFactory tupleWriterFactory, int keyFieldCount) {
 		this.tupleWriterFactory = tupleWriterFactory;
-		this.recDescSers = recDescSers;
+		if (keyFieldCount % 2 != 0) {
+			throw new IllegalArgumentException(
+					"The key field count must be even: an R-tree key stores a lower and an upper bound per dimension.");
+		}
 		this.keyFieldCount = keyFieldCount;
 	}
 
 	@Override
 	public IRTreeInteriorFrame createFrame() {
 		return new RTreeNSMInteriorFrame(
-				tupleWriterFactory.createTupleWriter(), recDescSers,
-				keyFieldCount);
+				tupleWriterFactory.createTupleWriter(), keyFieldCount);
 	}
 }
\ No newline at end of file
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java
index 29536a8..fa2ad32 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java
@@ -15,7 +15,6 @@
 
 package edu.uci.ics.hyracks.storage.am.rtree.frames;
 
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
 import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
@@ -32,8 +31,8 @@
 public class RTreeNSMLeafFrame extends RTreeNSMFrame implements IRTreeLeafFrame {
 
 	public RTreeNSMLeafFrame(ITreeIndexTupleWriter tupleWriter,
-			ISerializerDeserializer[] recDescSers, int keyFieldCount) {
-		super(tupleWriter, recDescSers, keyFieldCount);
+			int keyFieldCount) {
+		super(tupleWriter, keyFieldCount);
 	}
 
 	@Override
@@ -96,20 +95,20 @@
 
 				frameTuple.resetByTupleIndex(this, k);
 
-				double LowerKey = recDescSers[i]
+				double LowerKey = cmp.getValueProviders()[i]
 						.getValue(frameTuple.getFieldData(i),
 								frameTuple.getFieldStart(i));
-				double UpperKey = recDescSers[j]
+				double UpperKey = cmp.getValueProviders()[j]
 						.getValue(frameTuple.getFieldData(j),
 								frameTuple.getFieldStart(j));
 
 				tupleEntries1.add(k, LowerKey);
 				tupleEntries2.add(k, UpperKey);
 			}
-			double LowerKey = recDescSers[i].getValue(tuple.getFieldData(i),
-					tuple.getFieldStart(i));
-			double UpperKey = recDescSers[j].getValue(tuple.getFieldData(j),
-					tuple.getFieldStart(j));
+			double LowerKey = cmp.getValueProviders()[i].getValue(
+					tuple.getFieldData(i), tuple.getFieldStart(i));
+			double UpperKey = cmp.getValueProviders()[j].getValue(
+					tuple.getFieldData(j), tuple.getFieldStart(j));
 
 			tupleEntries1.add(-1, LowerKey);
 			tupleEntries2.add(-1, UpperKey);
@@ -122,12 +121,12 @@
 			for (int k = 1; k <= splitDistribution; ++k) {
 				int d = m - 1 + k;
 
-				generateDist(tuple, tupleEntries1, rec[0], 0, d);
-				generateDist(tuple, tupleEntries2, rec[1], 0, d);
+				generateDist(tuple, tupleEntries1, rec[0], 0, d, cmp);
+				generateDist(tuple, tupleEntries2, rec[1], 0, d, cmp);
 				generateDist(tuple, tupleEntries1, rec[2], d,
-						getTupleCount() + 1);
+						getTupleCount() + 1, cmp);
 				generateDist(tuple, tupleEntries2, rec[3], d,
-						getTupleCount() + 1);
+						getTupleCount() + 1, cmp);
 
 				// calculate the margin of the distributions
 				lowerMargin += rec[0].margin() + rec[2].margin();
@@ -148,12 +147,12 @@
 
 		for (int i = 0; i < getTupleCount(); ++i) {
 			frameTuple.resetByTupleIndex(this, i);
-			double key = recDescSers[splitAxis + sortOrder].getValue(
-					frameTuple.getFieldData(splitAxis + sortOrder),
-					frameTuple.getFieldStart(splitAxis + sortOrder));
+			double key = cmp.getValueProviders()[splitAxis + sortOrder]
+					.getValue(frameTuple.getFieldData(splitAxis + sortOrder),
+							frameTuple.getFieldStart(splitAxis + sortOrder));
 			tupleEntries1.add(i, key);
 		}
-		double key = recDescSers[splitAxis + sortOrder].getValue(
+		double key = cmp.getValueProviders()[splitAxis + sortOrder].getValue(
 				tuple.getFieldData(splitAxis + sortOrder),
 				tuple.getFieldStart(splitAxis + sortOrder));
 		tupleEntries1.add(-1, key);
@@ -165,8 +164,9 @@
 		for (int i = 1; i <= splitDistribution; ++i) {
 			int d = m - 1 + i;
 
-			generateDist(tuple, tupleEntries1, rec[0], 0, d);
-			generateDist(tuple, tupleEntries1, rec[2], d, getTupleCount() + 1);
+			generateDist(tuple, tupleEntries1, rec[0], 0, d, cmp);
+			generateDist(tuple, tupleEntries1, rec[2], d, getTupleCount() + 1,
+					cmp);
 
 			double overlap = rec[0].overlappedArea(rec[2]);
 			if (overlap < minOverlap) {
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java
index a470623..51a047e 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java
@@ -15,7 +15,6 @@
 
 package edu.uci.ics.hyracks.storage.am.rtree.frames;
 
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
@@ -24,20 +23,21 @@
 
 	private static final long serialVersionUID = 1L;
 	private ITreeIndexTupleWriterFactory tupleWriterFactory;
-	private ISerializerDeserializer[] recDescSers;
 	private int keyFieldCount;
 
 	public RTreeNSMLeafFrameFactory(
-			ITreeIndexTupleWriterFactory tupleWriterFactory,
-			ISerializerDeserializer[] recDescSers, int keyFieldCount) {
+			ITreeIndexTupleWriterFactory tupleWriterFactory, int keyFieldCount) {
 		this.tupleWriterFactory = tupleWriterFactory;
-		this.recDescSers = recDescSers;
+		if (keyFieldCount % 2 != 0) {
+			throw new IllegalArgumentException(
+					"The key field count must be even: an R-tree key stores a lower and an upper bound per dimension.");
+		}
 		this.keyFieldCount = keyFieldCount;
 	}
 
 	@Override
 	public IRTreeLeafFrame createFrame() {
 		return new RTreeNSMLeafFrame(tupleWriterFactory.createTupleWriter(),
-				recDescSers, keyFieldCount);
+				keyFieldCount);
 	}
 }
\ No newline at end of file
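
Editor's note: a hedged sketch of how the slimmed-down frame-factory constructors might be wired for a 2-D R-tree (four key fields: two bounds per dimension). RTreeFrameFactorySetup and its create method are illustrative names; the tuple writer factory is passed in rather than invented here. An odd key field count now fails fast in both factory constructors.

import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;

// Hypothetical wiring helper, not part of the patch.
public class RTreeFrameFactorySetup {
    public static ITreeIndexFrameFactory[] create(ITreeIndexTupleWriterFactory tupleWriterFactory) {
        int keyFieldCount = 4; // 2-D keys: a lower and an upper bound per dimension
        ITreeIndexFrameFactory interiorFrameFactory =
                new RTreeNSMInteriorFrameFactory(tupleWriterFactory, keyFieldCount);
        ITreeIndexFrameFactory leafFrameFactory =
                new RTreeNSMLeafFrameFactory(tupleWriterFactory, keyFieldCount);
        return new ITreeIndexFrameFactory[] { interiorFrameFactory, leafFrameFactory };
    }
}
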
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/DoubleGenericPrimitiveSerializerDeserializer.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/DoubleGenericPrimitiveSerializerDeserializer.java
deleted file mode 100644
index c24eef4..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/DoubleGenericPrimitiveSerializerDeserializer.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.impls;
-
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IGenericPrimitiveSerializerDeserializer;
-
-public class DoubleGenericPrimitiveSerializerDeserializer extends
-		DoubleSerializerDeserializer implements
-		IGenericPrimitiveSerializerDeserializer<Double> {
-
-	private static final long serialVersionUID = 1L;
-
-	public static final DoubleGenericPrimitiveSerializerDeserializer INSTANCE = new DoubleGenericPrimitiveSerializerDeserializer();
-
-	@Override
-	public double getValue(byte[] bytes, int offset) {
-		return DoubleSerializerDeserializer.getDouble(bytes, offset);
-	}
-
-}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/DoublePrimitiveValueProviderFactory.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/DoublePrimitiveValueProviderFactory.java
new file mode 100644
index 0000000..a590ef5
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/DoublePrimitiveValueProviderFactory.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.storage.am.rtree.impls;
+
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+
+public class DoublePrimitiveValueProviderFactory implements
+		IPrimitiveValueProviderFactory {
+	private static final long serialVersionUID = 1L;
+
+	public static final DoublePrimitiveValueProviderFactory INSTANCE = new DoublePrimitiveValueProviderFactory();
+
+	private DoublePrimitiveValueProviderFactory() {
+	}
+
+	@Override
+	public IPrimitiveValueProvider createPrimitiveValueProvider() {
+		return new IPrimitiveValueProvider() {
+			@Override
+			public double getValue(byte[] bytes, int offset) {
+				return DoubleSerializerDeserializer.getDouble(bytes, offset);
+			}
+		};
+	}
+}
\ No newline at end of file
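
Editor's note: a small, hedged usage sketch of the new factory. The demo class and the 42.5 value are illustrative, and the byte layout assumes the big-endian encoding that DoubleSerializerDeserializer.getDouble reads.

import java.nio.ByteBuffer;

import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
import edu.uci.ics.hyracks.storage.am.rtree.impls.DoublePrimitiveValueProviderFactory;

// Hypothetical demo, not part of the patch: decode a double key field from raw bytes.
public class DoubleValueProviderDemo {
    public static void main(String[] args) {
        // Big-endian double, matching the layout DoubleSerializerDeserializer.getDouble expects.
        byte[] bytes = ByteBuffer.allocate(8).putDouble(42.5).array();
        IPrimitiveValueProvider provider =
                DoublePrimitiveValueProviderFactory.INSTANCE.createPrimitiveValueProvider();
        double value = provider.getValue(bytes, 0); // 42.5
        System.out.println(value);
    }
}
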
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/EntriesOrder.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/EntriesOrder.java
index 8dce1ac..91fca6a 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/EntriesOrder.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/EntriesOrder.java
@@ -16,5 +16,5 @@
 package edu.uci.ics.hyracks.storage.am.rtree.impls;
 
 public enum EntriesOrder {
-    ASCENDING, DESCENDING
+	ASCENDING, DESCENDING
 }
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/FloatGenericPrimitiveSerializerDeserializer.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/FloatGenericPrimitiveSerializerDeserializer.java
deleted file mode 100644
index 4a2fac6..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/FloatGenericPrimitiveSerializerDeserializer.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.impls;
-
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.FloatSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IGenericPrimitiveSerializerDeserializer;
-
-public class FloatGenericPrimitiveSerializerDeserializer extends
-		FloatSerializerDeserializer implements
-		IGenericPrimitiveSerializerDeserializer<Float> {
-
-	private static final long serialVersionUID = 1L;
-
-	public static final FloatGenericPrimitiveSerializerDeserializer INSTANCE = new FloatGenericPrimitiveSerializerDeserializer();
-
-	@Override
-	public double getValue(byte[] bytes, int offset) {
-		return FloatSerializerDeserializer.getFloat(bytes, offset);
-	}
-
-}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/FloatPrimitiveValueProviderFactory.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/FloatPrimitiveValueProviderFactory.java
new file mode 100644
index 0000000..605f73a
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/FloatPrimitiveValueProviderFactory.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.storage.am.rtree.impls;
+
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.FloatSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+
+public class FloatPrimitiveValueProviderFactory implements
+		IPrimitiveValueProviderFactory {
+	private static final long serialVersionUID = 1L;
+
+	public static final FloatPrimitiveValueProviderFactory INSTANCE = new FloatPrimitiveValueProviderFactory();
+
+	private FloatPrimitiveValueProviderFactory() {
+	}
+
+	@Override
+	public IPrimitiveValueProvider createPrimitiveValueProvider() {
+		return new IPrimitiveValueProvider() {
+			@Override
+			public double getValue(byte[] bytes, int offset) {
+				return FloatSerializerDeserializer.getFloat(bytes, offset);
+			}
+		};
+	}
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/IntegerGenericPrimitiveSerializerDeserializer.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/IntegerGenericPrimitiveSerializerDeserializer.java
deleted file mode 100644
index 835d911..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/IntegerGenericPrimitiveSerializerDeserializer.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.impls;
-
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IGenericPrimitiveSerializerDeserializer;
-
-public class IntegerGenericPrimitiveSerializerDeserializer extends
-		IntegerSerializerDeserializer implements
-		IGenericPrimitiveSerializerDeserializer<Integer> {
-
-	private static final long serialVersionUID = 1L;
-
-	public static final IntegerGenericPrimitiveSerializerDeserializer INSTANCE = new IntegerGenericPrimitiveSerializerDeserializer();
-
-	@Override
-	public double getValue(byte[] bytes, int offset) {
-		return IntegerSerializerDeserializer.getInt(bytes, offset);
-	}
-
-}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/IntegerPrimitiveValueProviderFactory.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/IntegerPrimitiveValueProviderFactory.java
new file mode 100644
index 0000000..d1457db
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/IntegerPrimitiveValueProviderFactory.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.storage.am.rtree.impls;
+
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+
+public class IntegerPrimitiveValueProviderFactory implements
+		IPrimitiveValueProviderFactory {
+	private static final long serialVersionUID = 1L;
+
+	public static final IntegerPrimitiveValueProviderFactory INSTANCE = new IntegerPrimitiveValueProviderFactory();
+
+	private IntegerPrimitiveValueProviderFactory() {
+	}
+
+	@Override
+	public IPrimitiveValueProvider createPrimitiveValueProvider() {
+		return new IPrimitiveValueProvider() {
+			@Override
+			public double getValue(byte[] bytes, int offset) {
+				return IntegerSerializerDeserializer.getInt(bytes, offset);
+			}
+		};
+	}
+}
\ No newline at end of file
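
The three PrimitiveValueProviderFactory classes added in this patch follow the same pattern: a serializable singleton factory whose createPrimitiveValueProvider() returns a stateless provider that decodes one key coordinate (widened to double) from a tuple's backing byte array. Below is a minimal usage sketch; the class name, the ByteBuffer setup, and the assumption that DoubleSerializerDeserializer uses the big-endian DataOutput layout are illustrative and not part of the patch.

import java.nio.ByteBuffer;

import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
import edu.uci.ics.hyracks.storage.am.rtree.impls.DoublePrimitiveValueProviderFactory;

public class ValueProviderSketch {
    public static void main(String[] args) {
        // Encode a double the way a DataOutput-backed serializer would (big-endian).
        byte[] bytes = ByteBuffer.allocate(8).putDouble(42.5).array();

        // Factories are singletons exposed via INSTANCE.
        IPrimitiveValueProviderFactory factory = DoublePrimitiveValueProviderFactory.INSTANCE;
        IPrimitiveValueProvider provider = factory.createPrimitiveValueProvider();

        // The provider reads the raw coordinate value at a field's offset.
        double value = provider.getValue(bytes, 0);
        System.out.println(value); // expected: 42.5
    }
}
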
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/PathList.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/PathList.java
index 58bad69..d66d0a0 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/PathList.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/PathList.java
@@ -18,93 +18,93 @@
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.IntArrayList;
 
 public class PathList {
-    private IntArrayList pageIds;
-    private IntArrayList pageLsns;
-    private IntArrayList pageIndexes;
+	private IntArrayList pageIds;
+	private IntArrayList pageLsns;
+	private IntArrayList pageIndexes;
 
-    public PathList(int initialCapacity, int growth) {
-        pageIds = new IntArrayList(initialCapacity, growth);
-        pageLsns = new IntArrayList(initialCapacity, growth);
-        pageIndexes = new IntArrayList(initialCapacity, growth);
-    }
+	public PathList(int initialCapacity, int growth) {
+		pageIds = new IntArrayList(initialCapacity, growth);
+		pageLsns = new IntArrayList(initialCapacity, growth);
+		pageIndexes = new IntArrayList(initialCapacity, growth);
+	}
 
-    public int size() {
-        return pageIds.size();
-    }
+	public int size() {
+		return pageIds.size();
+	}
 
-    public int first() {
-        return pageIds.first();
-    }
+	public int first() {
+		return pageIds.first();
+	}
 
-    public void add(int pageId, int pageLsn, int pageIndex) {
-        pageIds.add(pageId);
-        pageLsns.add(pageLsn);
-        pageIndexes.add(pageIndex);
-    }
+	public void add(int pageId, int pageLsn, int pageIndex) {
+		pageIds.add(pageId);
+		pageLsns.add(pageLsn);
+		pageIndexes.add(pageIndex);
+	}
 
-    public int getFirstPageId() {
-        return pageIds.getFirst();
-    }
+	public int getFirstPageId() {
+		return pageIds.getFirst();
+	}
 
-    public int getFirstPageLsn() {
-        return pageLsns.getFirst();
-    }
+	public int getFirstPageLsn() {
+		return pageLsns.getFirst();
+	}
 
-    public int getFirstPageIndex() {
-        return pageIndexes.getFirst();
-    }
+	public int getFirstPageIndex() {
+		return pageIndexes.getFirst();
+	}
 
-    public int getLastPageId() {
-        return pageIds.getLast();
-    }
+	public int getLastPageId() {
+		return pageIds.getLast();
+	}
 
-    public int getLastPageLsn() {
-        return pageLsns.getLast();
-    }
+	public int getLastPageLsn() {
+		return pageLsns.getLast();
+	}
 
-    public int getLastPageIndex() {
-        return pageIndexes.getLast();
-    }
+	public int getLastPageIndex() {
+		return pageIndexes.getLast();
+	}
 
-    public int getPageId(int i) {
-        return pageIds.get(i);
-    }
+	public int getPageId(int i) {
+		return pageIds.get(i);
+	}
 
-    public int getPageLsn(int i) {
-        return pageLsns.get(i);
-    }
+	public int getPageLsn(int i) {
+		return pageLsns.get(i);
+	}
 
-    public int getPageIndex(int i) {
-        return pageIndexes.get(i);
-    }
+	public int getPageIndex(int i) {
+		return pageIndexes.get(i);
+	}
 
-    public void setPageLsn(int i, int pageLsn) {
-        pageLsns.set(i, pageLsn);
-    }
+	public void setPageLsn(int i, int pageLsn) {
+		pageLsns.set(i, pageLsn);
+	}
 
-    public void moveFirst() {
-        pageIds.moveFirst();
-        pageLsns.moveFirst();
-        pageIndexes.moveFirst();
-    }
+	public void moveFirst() {
+		pageIds.moveFirst();
+		pageLsns.moveFirst();
+		pageIndexes.moveFirst();
+	}
 
-    public void moveLast() {
-        pageIds.removeLast();
-        pageLsns.removeLast();
-        pageIndexes.removeLast();
-    }
+	public void moveLast() {
+		pageIds.removeLast();
+		pageLsns.removeLast();
+		pageIndexes.removeLast();
+	}
 
-    public boolean isLast() {
-        return pageIds.isLast();
-    }
+	public boolean isLast() {
+		return pageIds.isLast();
+	}
 
-    public void clear() {
-        pageIds.clear();
-        pageLsns.clear();
-        pageIndexes.clear();
-    }
+	public void clear() {
+		pageIds.clear();
+		pageLsns.clear();
+		pageIndexes.clear();
+	}
 
-    public boolean isEmpty() {
-        return pageIds.isEmpty();
-    }
+	public boolean isEmpty() {
+		return pageIds.isEmpty();
+	}
 }
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java
index 8976409..03b2062 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java
@@ -47,891 +47,972 @@
 
 public class RTree implements ITreeIndex {
 
-    private boolean created = false;
-    private boolean loaded = false;
-    private final int rootPage = 1; // the root page never changes
+	private boolean created = false;
+	private boolean loaded = false;
+	private final int rootPage = 1; // the root page never changes
 
-    private final AtomicInteger globalNsn; // Global node sequence number
-    private int numOfPages = 1;
-    private final ReadWriteLock treeLatch;
+	private final AtomicInteger globalNsn; // Global node sequence number
+	private int numOfPages = 1;
+	private final ReadWriteLock treeLatch;
 
-    private final IFreePageManager freePageManager;
-    private final IBufferCache bufferCache;
-    private int fileId;
+	private final IFreePageManager freePageManager;
+	private final IBufferCache bufferCache;
+	private int fileId;
 
-    private final SearchPredicate diskOrderScanPredicate;
-    private final ITreeIndexFrameFactory interiorFrameFactory;
-    private final ITreeIndexFrameFactory leafFrameFactory;
-    private final MultiComparator cmp;
+	private final SearchPredicate diskOrderScanPredicate;
+	private final ITreeIndexFrameFactory interiorFrameFactory;
+	private final ITreeIndexFrameFactory leafFrameFactory;
+	private final MultiComparator cmp;
 
-    public int rootSplits = 0;
-    public int[] splitsByLevel = new int[500];
-    public AtomicLong readLatchesAcquired = new AtomicLong();
-    public AtomicLong readLatchesReleased = new AtomicLong();
-    public AtomicLong writeLatchesAcquired = new AtomicLong();
-    public AtomicLong writeLatchesReleased = new AtomicLong();
-    public AtomicLong pins = new AtomicLong();
-    public AtomicLong unpins = new AtomicLong();
-    public byte currentLevel = 0;
+	public int rootSplits = 0;
+	public int[] splitsByLevel = new int[500];
+	public AtomicLong readLatchesAcquired = new AtomicLong();
+	public AtomicLong readLatchesReleased = new AtomicLong();
+	public AtomicLong writeLatchesAcquired = new AtomicLong();
+	public AtomicLong writeLatchesReleased = new AtomicLong();
+	public AtomicLong pins = new AtomicLong();
+	public AtomicLong unpins = new AtomicLong();
+	public byte currentLevel = 0;
 
-    public RTree(IBufferCache bufferCache, IFreePageManager freePageManager,
-            ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory leafFrameFactory, MultiComparator cmp) {
-        this.bufferCache = bufferCache;
-        this.freePageManager = freePageManager;
-        this.interiorFrameFactory = interiorFrameFactory;
-        this.leafFrameFactory = leafFrameFactory;
-        this.cmp = cmp;
-        globalNsn = new AtomicInteger();
-        this.treeLatch = new ReentrantReadWriteLock(true);
-        this.diskOrderScanPredicate = new SearchPredicate(null, cmp);
-    }
+	public RTree(IBufferCache bufferCache, IFreePageManager freePageManager,
+			ITreeIndexFrameFactory interiorFrameFactory,
+			ITreeIndexFrameFactory leafFrameFactory, MultiComparator cmp) {
+		this.bufferCache = bufferCache;
+		this.freePageManager = freePageManager;
+		this.interiorFrameFactory = interiorFrameFactory;
+		this.leafFrameFactory = leafFrameFactory;
+		this.cmp = cmp;
+		globalNsn = new AtomicInteger();
+		this.treeLatch = new ReentrantReadWriteLock(true);
+		this.diskOrderScanPredicate = new SearchPredicate(null, cmp);
+	}
 
-    public void incrementGlobalNsn() {
-        globalNsn.incrementAndGet();
-    }
+	public void incrementGlobalNsn() {
+		globalNsn.incrementAndGet();
+	}
 
-    public int getGlobalNsn() {
-        return globalNsn.get();
-    }
+	public int getGlobalNsn() {
+		return globalNsn.get();
+	}
 
-    public void incrementReadLatchesAcquired() {
-        readLatchesAcquired.incrementAndGet();
-    }
+	public void incrementReadLatchesAcquired() {
+		readLatchesAcquired.incrementAndGet();
+	}
 
-    public void incrementReadLatchesReleased() {
-        readLatchesReleased.incrementAndGet();
-    }
+	public void incrementReadLatchesReleased() {
+		readLatchesReleased.incrementAndGet();
+	}
 
-    public void incrementWriteLatchesAcquired() {
-        writeLatchesAcquired.incrementAndGet();
-    }
+	public void incrementWriteLatchesAcquired() {
+		writeLatchesAcquired.incrementAndGet();
+	}
 
-    public void incrementWriteLatchesReleased() {
-        writeLatchesReleased.incrementAndGet();
-    }
+	public void incrementWriteLatchesReleased() {
+		writeLatchesReleased.incrementAndGet();
+	}
 
-    public void incrementPins() {
-        pins.incrementAndGet();
-    }
+	public void incrementPins() {
+		pins.incrementAndGet();
+	}
 
-    public void incrementUnpins() {
-        unpins.incrementAndGet();
-    }
+	public void incrementUnpins() {
+		unpins.incrementAndGet();
+	}
 
-    public String printStats() {
-        StringBuilder strBuilder = new StringBuilder();
-        strBuilder.append("\n");
-        strBuilder.append("ROOTSPLITS: " + rootSplits + "\n");
-        strBuilder.append("SPLITS BY LEVEL\n");
-        for (int i = 0; i < currentLevel; i++) {
-            strBuilder.append(String.format("%3d ", i) + String.format("%8d ", splitsByLevel[i]) + "\n");
-        }
-        strBuilder.append(String.format("READ LATCHES:  %10d %10d\n", readLatchesAcquired.get(),
-                readLatchesReleased.get()));
-        strBuilder.append(String.format("WRITE LATCHES: %10d %10d\n", writeLatchesAcquired.get(),
-                writeLatchesReleased.get()));
-        strBuilder.append(String.format("PINS:          %10d %10d\n", pins.get(), unpins.get()));
+	public String printStats() {
+		StringBuilder strBuilder = new StringBuilder();
+		strBuilder.append("\n");
+		strBuilder.append("ROOTSPLITS: " + rootSplits + "\n");
+		strBuilder.append("SPLITS BY LEVEL\n");
+		for (int i = 0; i < currentLevel; i++) {
+			strBuilder.append(String.format("%3d ", i)
+					+ String.format("%8d ", splitsByLevel[i]) + "\n");
+		}
+		strBuilder.append(String.format("READ LATCHES:  %10d %10d\n",
+				readLatchesAcquired.get(), readLatchesReleased.get()));
+		strBuilder.append(String.format("WRITE LATCHES: %10d %10d\n",
+				writeLatchesAcquired.get(), writeLatchesReleased.get()));
+		strBuilder.append(String.format("PINS:          %10d %10d\n",
+				pins.get(), unpins.get()));
 
-        strBuilder.append(String.format("Num of Pages:          %10d\n", numOfPages));
+		strBuilder.append(String.format("Num of Pages:          %10d\n",
+				numOfPages));
 
-        return strBuilder.toString();
-    }
+		return strBuilder.toString();
+	}
 
-    public void printTree(IRTreeFrame leafFrame, IRTreeFrame interiorFrame, ISerializerDeserializer[] fields)
-            throws Exception {
-        printTree(rootPage, null, false, leafFrame, interiorFrame, fields);
-    }
+	public void printTree(IRTreeFrame leafFrame, IRTreeFrame interiorFrame,
+			ISerializerDeserializer[] fields) throws Exception {
+		printTree(rootPage, null, false, leafFrame, interiorFrame, fields);
+	}
 
-    public void printTree(int pageId, ICachedPage parent, boolean unpin, IRTreeFrame leafFrame,
-            IRTreeFrame interiorFrame, ISerializerDeserializer[] fields) throws Exception {
+	public void printTree(int pageId, ICachedPage parent, boolean unpin,
+			IRTreeFrame leafFrame, IRTreeFrame interiorFrame,
+			ISerializerDeserializer[] fields) throws Exception {
 
-        ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-        incrementPins();
-        node.acquireReadLatch();
-        incrementReadLatchesAcquired();
+		ICachedPage node = bufferCache.pin(
+				BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+		incrementPins();
+		node.acquireReadLatch();
+		incrementReadLatchesAcquired();
 
-        try {
-            if (parent != null && unpin == true) {
-                parent.releaseReadLatch();
-                incrementReadLatchesReleased();
-                bufferCache.unpin(parent);
-                incrementUnpins();
-            }
+		try {
+			if (parent != null && unpin == true) {
+				parent.releaseReadLatch();
+				incrementReadLatchesReleased();
+				bufferCache.unpin(parent);
+				incrementUnpins();
+			}
 
-            interiorFrame.setPage(node);
-            int level = interiorFrame.getLevel();
+			interiorFrame.setPage(node);
+			int level = interiorFrame.getLevel();
 
-            System.out.format("%1d ", level);
-            System.out.format("%3d ", pageId);
-            for (int i = 0; i < currentLevel - level; i++)
-                System.out.format("    ");
+			System.out.format("%1d ", level);
+			System.out.format("%3d ", pageId);
+			for (int i = 0; i < currentLevel - level; i++)
+				System.out.format("    ");
 
-            String keyString;
-            if (interiorFrame.isLeaf()) {
-                leafFrame.setPage(node);
-                keyString = leafFrame.printKeys(cmp, fields);
-            } else {
-                keyString = interiorFrame.printKeys(cmp, fields);
-            }
+			String keyString;
+			if (interiorFrame.isLeaf()) {
+				leafFrame.setPage(node);
+				keyString = leafFrame.printKeys(cmp, fields);
+			} else {
+				keyString = interiorFrame.printKeys(cmp, fields);
+			}
 
-            System.out.format(keyString);
-            if (!interiorFrame.isLeaf()) {
-                ArrayList<Integer> children = ((RTreeNSMFrame) (interiorFrame)).getChildren(cmp);
-                for (int i = 0; i < children.size(); i++) {
-                    printTree(children.get(i), node, i == children.size() - 1, leafFrame, interiorFrame, fields);
-                }
-            } else {
-                node.releaseReadLatch();
-                incrementReadLatchesReleased();
-                bufferCache.unpin(node);
-                incrementUnpins();
-            }
-        } catch (Exception e) {
-            node.releaseReadLatch();
-            incrementReadLatchesReleased();
-            bufferCache.unpin(node);
-            incrementUnpins();
-            throw e;
-        }
-    }
+			System.out.format(keyString);
+			if (!interiorFrame.isLeaf()) {
+				ArrayList<Integer> children = ((RTreeNSMFrame) (interiorFrame))
+						.getChildren(cmp);
+				for (int i = 0; i < children.size(); i++) {
+					printTree(children.get(i), node, i == children.size() - 1,
+							leafFrame, interiorFrame, fields);
+				}
+			} else {
+				node.releaseReadLatch();
+				incrementReadLatchesReleased();
+				bufferCache.unpin(node);
+				incrementUnpins();
+			}
+		} catch (Exception e) {
+			node.releaseReadLatch();
+			incrementReadLatchesReleased();
+			bufferCache.unpin(node);
+			incrementUnpins();
+			throw e;
+		}
+	}
 
-    @Override
-    public void create(int fileId, ITreeIndexFrame leafFrame, ITreeIndexMetaDataFrame metaFrame) throws Exception {
-        if (created)
-            return;
+	@Override
+	public void create(int fileId, ITreeIndexFrame leafFrame,
+			ITreeIndexMetaDataFrame metaFrame) throws Exception {
+		if (created)
+			return;
 
-        treeLatch.writeLock().lock();
-        try {
-            // check if another thread beat us to it
-            if (created)
-                return;
+		treeLatch.writeLock().lock();
+		try {
+			// check if another thread beat us to it
+			if (created)
+				return;
 
-            freePageManager.init(metaFrame, rootPage);
+			freePageManager.init(metaFrame, rootPage);
 
-            // initialize root page
-            ICachedPage rootNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rootPage), true);
-            incrementPins();
+			// initialize root page
+			ICachedPage rootNode = bufferCache.pin(
+					BufferedFileHandle.getDiskPageId(fileId, rootPage), true);
+			incrementPins();
 
-            rootNode.acquireWriteLatch();
-            incrementWriteLatchesAcquired();
-            try {
-                leafFrame.setPage(rootNode);
-                leafFrame.initBuffer((byte) 0);
-            } finally {
-                rootNode.releaseWriteLatch();
-                incrementWriteLatchesReleased();
-                bufferCache.unpin(rootNode);
-                incrementUnpins();
-            }
-            currentLevel = 0;
+			rootNode.acquireWriteLatch();
+			incrementWriteLatchesAcquired();
+			try {
+				leafFrame.setPage(rootNode);
+				leafFrame.initBuffer((byte) 0);
+			} finally {
+				rootNode.releaseWriteLatch();
+				incrementWriteLatchesReleased();
+				bufferCache.unpin(rootNode);
+				incrementUnpins();
+			}
+			currentLevel = 0;
 
-            created = true;
-        } finally {
-            treeLatch.writeLock().unlock();
-        }
-    }
+			created = true;
+		} finally {
+			treeLatch.writeLock().unlock();
+		}
+	}
 
-    public void open(int fileId) {
-        this.fileId = fileId;
-    }
+	public void open(int fileId) {
+		this.fileId = fileId;
+	}
 
-    public void close() {
-        fileId = -1;
-    }
+	public void close() {
+		fileId = -1;
+	}
 
-    @Override
-    public RTreeOpContext createOpContext(IndexOp op, ITreeIndexFrame leafFrame, ITreeIndexFrame interiorFrame,
-            ITreeIndexMetaDataFrame metaFrame) {
-        return new RTreeOpContext(op, (IRTreeLeafFrame) leafFrame, (IRTreeInteriorFrame) interiorFrame, metaFrame, 8);
-    }
+	@Override
+	public RTreeOpContext createOpContext(IndexOp op,
+			ITreeIndexFrame leafFrame, ITreeIndexFrame interiorFrame,
+			ITreeIndexMetaDataFrame metaFrame) {
+		return new RTreeOpContext(op, (IRTreeLeafFrame) leafFrame,
+				(IRTreeInteriorFrame) interiorFrame, metaFrame, 8);
+	}
 
-    @Override
-    public void insert(ITupleReference tuple, IndexOpContext ictx) throws Exception {
-        RTreeOpContext ctx = (RTreeOpContext) ictx;
-        ctx.reset();
-        ctx.setTuple(tuple);
-        ctx.splitKey.reset();
-        ctx.splitKey.getLeftTuple().setFieldCount(cmp.getKeyFieldCount());
-        ctx.splitKey.getRightTuple().setFieldCount(cmp.getKeyFieldCount());
-        ctx.interiorFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
-        ctx.leafFrame.setPageTupleFieldCount(cmp.getFieldCount());
+	@Override
+	public void insert(ITupleReference tuple, IndexOpContext ictx)
+			throws Exception {
+		RTreeOpContext ctx = (RTreeOpContext) ictx;
+		ctx.reset();
+		ctx.setTuple(tuple);
+		ctx.splitKey.reset();
+		ctx.splitKey.getLeftTuple().setFieldCount(cmp.getKeyFieldCount());
+		ctx.splitKey.getRightTuple().setFieldCount(cmp.getKeyFieldCount());
+		ctx.interiorFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
+		ctx.leafFrame.setPageTupleFieldCount(cmp.getFieldCount());
 
-        ICachedPage leafNode = findLeaf(ctx);
+		int maxFieldPos = cmp.getKeyFieldCount() / 2;
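+		// The first maxFieldPos key fields are the MBR's low point and the
+		// remaining key fields are its high point; reject tuples where any
+		// low coordinate exceeds the corresponding high coordinate.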
+		for (int i = 0; i < maxFieldPos; i++) {
+			int j = maxFieldPos + i;
+			int c = cmp.getComparators()[i].compare(tuple.getFieldData(i),
+					tuple.getFieldStart(i), tuple.getFieldLength(i),
+					tuple.getFieldData(j), tuple.getFieldStart(j),
+					tuple.getFieldLength(j));
+			if (c > 0) {
+				throw new IllegalArgumentException(
+						"The low key point has larger coordinates than the high key point.");
+			}
+		}
 
-        int pageId = ctx.pathList.getLastPageId();
-        ctx.pathList.moveLast();
-        insertTuple(leafNode, pageId, ctx.getTuple(), ctx, true);
+		ICachedPage leafNode = findLeaf(ctx);
 
-        while (true) {
-            if (ctx.splitKey.getLeftPageBuffer() != null) {
-                updateParentForInsert(ctx);
-            } else {
-                break;
-            }
-        }
+		int pageId = ctx.pathList.getLastPageId();
+		ctx.pathList.moveLast();
+		insertTuple(leafNode, pageId, ctx.getTuple(), ctx, true);
 
-        leafNode.releaseWriteLatch();
-        incrementWriteLatchesReleased();
-        bufferCache.unpin(leafNode);
-        incrementUnpins();
-    }
+		while (true) {
+			if (ctx.splitKey.getLeftPageBuffer() != null) {
+				updateParentForInsert(ctx);
+			} else {
+				break;
+			}
+		}
 
-    public ICachedPage findLeaf(RTreeOpContext ctx) throws Exception {
-        int pageId = rootPage;
-        boolean writeLatched = false;
-        ICachedPage node = null;
-        boolean isLeaf = false;
-        int pageLsn = 0, parentLsn = 0;
+		leafNode.releaseWriteLatch();
+		incrementWriteLatchesReleased();
+		bufferCache.unpin(leafNode);
+		incrementUnpins();
+	}
 
-        while (true) {
-            if (!writeLatched) {
-                node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-                incrementPins();
-                ctx.interiorFrame.setPage(node);
-                isLeaf = ctx.interiorFrame.isLeaf();
-                if (isLeaf) {
-                    node.acquireWriteLatch();
-                    incrementWriteLatchesAcquired();
-                    writeLatched = true;
+	public ICachedPage findLeaf(RTreeOpContext ctx) throws Exception {
+		int pageId = rootPage;
+		boolean writeLatched = false;
+		ICachedPage node = null;
+		boolean isLeaf = false;
+		int pageLsn = 0, parentLsn = 0;
 
-                    if (!ctx.interiorFrame.isLeaf()) {
-                        node.releaseWriteLatch();
-                        incrementWriteLatchesReleased();
-                        bufferCache.unpin(node);
-                        incrementUnpins();
-                        writeLatched = false;
-                        continue;
-                    }
-                } else {
-                    // Be optimistic and grab read latch first. We will swap it
-                    // to write latch if we need to enlarge the best child
-                    // tuple.
-                    node.acquireReadLatch();
-                    incrementReadLatchesAcquired();
-                }
-            }
+		while (true) {
+			if (!writeLatched) {
+				node = bufferCache
+						.pin(BufferedFileHandle.getDiskPageId(fileId, pageId),
+								false);
+				incrementPins();
+				ctx.interiorFrame.setPage(node);
+				isLeaf = ctx.interiorFrame.isLeaf();
+				if (isLeaf) {
+					node.acquireWriteLatch();
+					incrementWriteLatchesAcquired();
+					writeLatched = true;
 
-            if (pageId != rootPage && parentLsn < ctx.interiorFrame.getPageNsn()) {
-                // Concurrent split detected, go back to parent and re-choose
-                // the best child
-                if (writeLatched) {
-                    node.releaseWriteLatch();
-                    incrementWriteLatchesReleased();
-                    bufferCache.unpin(node);
-                    incrementUnpins();
-                    writeLatched = false;
-                } else {
-                    node.releaseReadLatch();
-                    incrementReadLatchesReleased();
-                    bufferCache.unpin(node);
-                    incrementUnpins();
-                }
+					if (!ctx.interiorFrame.isLeaf()) {
+						node.releaseWriteLatch();
+						incrementWriteLatchesReleased();
+						bufferCache.unpin(node);
+						incrementUnpins();
+						writeLatched = false;
+						continue;
+					}
+				} else {
+					// Be optimistic and grab read latch first. We will swap it
+					// to write latch if we need to enlarge the best child
+					// tuple.
+					node.acquireReadLatch();
+					incrementReadLatchesAcquired();
+				}
+			}
 
-                pageId = ctx.pathList.getLastPageId();
-                if (pageId != rootPage) {
-                    parentLsn = ctx.pathList.getPageLsn(ctx.pathList.size() - 2);
-                }
-                ctx.pathList.moveLast();
-                continue;
-            }
+			if (pageId != rootPage
+					&& parentLsn < ctx.interiorFrame.getPageNsn()) {
+				// Concurrent split detected, go back to parent and re-choose
+				// the best child
+				if (writeLatched) {
+					node.releaseWriteLatch();
+					incrementWriteLatchesReleased();
+					bufferCache.unpin(node);
+					incrementUnpins();
+					writeLatched = false;
+				} else {
+					node.releaseReadLatch();
+					incrementReadLatchesReleased();
+					bufferCache.unpin(node);
+					incrementUnpins();
+				}
 
-            pageLsn = ctx.interiorFrame.getPageLsn();
-            ctx.pathList.add(pageId, pageLsn, -1);
+				pageId = ctx.pathList.getLastPageId();
+				if (pageId != rootPage) {
+					parentLsn = ctx.pathList
+							.getPageLsn(ctx.pathList.size() - 2);
+				}
+				ctx.pathList.moveLast();
+				continue;
+			}
 
-            if (!isLeaf) {
-                // findBestChild must be called *before* getBestChildPageId
-                ctx.interiorFrame.findBestChild(ctx.getTuple(), cmp);
-                int childPageId = ctx.interiorFrame.getBestChildPageId(cmp);
+			pageLsn = ctx.interiorFrame.getPageLsn();
+			ctx.pathList.add(pageId, pageLsn, -1);
 
-                if (!writeLatched) {
-                    node.releaseReadLatch();
-                    incrementReadLatchesReleased();
-                    // TODO: do we need to un-pin and pin again?
-                    bufferCache.unpin(node);
-                    incrementUnpins();
+			if (!isLeaf) {
+				// findBestChild must be called *before* getBestChildPageId
+				ctx.interiorFrame.findBestChild(ctx.getTuple(), cmp);
+				int childPageId = ctx.interiorFrame.getBestChildPageId(cmp);
 
-                    node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-                    incrementPins();
-                    node.acquireWriteLatch();
-                    incrementWriteLatchesAcquired();
-                    ctx.interiorFrame.setPage(node);
-                    writeLatched = true;
+				if (!writeLatched) {
+					node.releaseReadLatch();
+					incrementReadLatchesReleased();
+					// TODO: do we need to un-pin and pin again?
+					bufferCache.unpin(node);
+					incrementUnpins();
 
-                    if (ctx.interiorFrame.getPageLsn() != pageLsn) {
-                        // The page was changed while we unlocked it; thus,
-                        // retry (re-choose best child)
+					node = bufferCache.pin(
+							BufferedFileHandle.getDiskPageId(fileId, pageId),
+							false);
+					incrementPins();
+					node.acquireWriteLatch();
+					incrementWriteLatchesAcquired();
+					ctx.interiorFrame.setPage(node);
+					writeLatched = true;
 
-                        ctx.pathList.moveLast();
-                        continue;
-                    }
-                }
+					if (ctx.interiorFrame.getPageLsn() != pageLsn) {
+						// The page was changed while we unlocked it; thus,
+						// retry (re-choose best child)
 
-                // We don't need to reset the frameTuple because it is
-                // already pointing to the best child
-                ctx.interiorFrame.enlarge(ctx.getTuple(), cmp);
+						ctx.pathList.moveLast();
+						continue;
+					}
+				}
 
-                node.releaseWriteLatch();
-                incrementWriteLatchesReleased();
-                bufferCache.unpin(node);
-                incrementUnpins();
-                writeLatched = false;
+				// We don't need to reset the frameTuple because it is
+				// already pointing to the best child
+				ctx.interiorFrame.enlarge(ctx.getTuple(), cmp);
 
-                pageId = childPageId;
-                parentLsn = pageLsn;
-            } else {
-                ctx.leafFrame.setPage(node);
-                return node;
-            }
-        }
-    }
+				node.releaseWriteLatch();
+				incrementWriteLatchesReleased();
+				bufferCache.unpin(node);
+				incrementUnpins();
+				writeLatched = false;
 
-    private void insertTuple(ICachedPage node, int pageId, ITupleReference tuple, RTreeOpContext ctx, boolean isLeaf)
-            throws Exception {
-        FrameOpSpaceStatus spaceStatus;
-        if (!isLeaf) {
-            spaceStatus = ctx.interiorFrame.hasSpaceInsert(tuple, cmp);
-        } else {
-            spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple, cmp);
-        }
+				pageId = childPageId;
+				parentLsn = pageLsn;
+			} else {
+				ctx.leafFrame.setPage(node);
+				return node;
+			}
+		}
+	}
 
-        switch (spaceStatus) {
-            case SUFFICIENT_CONTIGUOUS_SPACE: {
-                if (!isLeaf) {
-                    ctx.interiorFrame.insert(tuple, cmp, -1);
-                    incrementGlobalNsn();
-                    ctx.interiorFrame.setPageLsn(getGlobalNsn());
-                } else {
-                    ctx.leafFrame.insert(tuple, cmp, -1);
-                    incrementGlobalNsn();
-                    ctx.leafFrame.setPageLsn(getGlobalNsn());
-                }
-                ctx.splitKey.reset();
-                break;
-            }
+	private void insertTuple(ICachedPage node, int pageId,
+			ITupleReference tuple, RTreeOpContext ctx, boolean isLeaf)
+			throws Exception {
+		FrameOpSpaceStatus spaceStatus;
+		if (!isLeaf) {
+			spaceStatus = ctx.interiorFrame.hasSpaceInsert(tuple, cmp);
+		} else {
+			spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple, cmp);
+		}
 
-            case SUFFICIENT_SPACE: {
-                if (!isLeaf) {
-                    ctx.interiorFrame.compact(cmp);
-                    ctx.interiorFrame.insert(tuple, cmp, -1);
-                    incrementGlobalNsn();
-                    ctx.interiorFrame.setPageLsn(getGlobalNsn());
-                } else {
-                    ctx.leafFrame.compact(cmp);
-                    ctx.leafFrame.insert(tuple, cmp, -1);
-                    incrementGlobalNsn();
-                    ctx.leafFrame.setPageLsn(getGlobalNsn());
-                }
-                ctx.splitKey.reset();
-                break;
-            }
+		switch (spaceStatus) {
+		case SUFFICIENT_CONTIGUOUS_SPACE: {
+			if (!isLeaf) {
+				ctx.interiorFrame.insert(tuple, cmp, -1);
+				incrementGlobalNsn();
+				ctx.interiorFrame.setPageLsn(getGlobalNsn());
+			} else {
+				ctx.leafFrame.insert(tuple, cmp, -1);
+				incrementGlobalNsn();
+				ctx.leafFrame.setPageLsn(getGlobalNsn());
+			}
+			ctx.splitKey.reset();
+			break;
+		}
 
-            case INSUFFICIENT_SPACE: {
-                int rightPageId = freePageManager.getFreePage(ctx.metaFrame);
-                ICachedPage rightNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rightPageId), true);
-                incrementPins();
-                rightNode.acquireWriteLatch();
-                incrementWriteLatchesAcquired();
+		case SUFFICIENT_SPACE: {
+			if (!isLeaf) {
+				ctx.interiorFrame.compact(cmp);
+				ctx.interiorFrame.insert(tuple, cmp, -1);
+				incrementGlobalNsn();
+				ctx.interiorFrame.setPageLsn(getGlobalNsn());
+			} else {
+				ctx.leafFrame.compact(cmp);
+				ctx.leafFrame.insert(tuple, cmp, -1);
+				incrementGlobalNsn();
+				ctx.leafFrame.setPageLsn(getGlobalNsn());
+			}
+			ctx.splitKey.reset();
+			break;
+		}
 
-                try {
-                    IRTreeFrame rightFrame;
-                    int ret;
-                    numOfPages++; // debug
-                    if (!isLeaf) {
-                        splitsByLevel[ctx.interiorFrame.getLevel()]++; // debug
-                        rightFrame = (IRTreeFrame) interiorFrameFactory.createFrame();
-                        rightFrame.setPage(rightNode);
-                        rightFrame.initBuffer((byte) ctx.interiorFrame.getLevel());
-                        rightFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
-                        ret = ctx.interiorFrame.split(rightFrame, tuple, cmp, ctx.splitKey);
-                        ctx.interiorFrame.setRightPage(rightPageId);
-                        rightFrame.setPageNsn(ctx.interiorFrame.getPageNsn());
-                        incrementGlobalNsn();
-                        int newNsn = getGlobalNsn();
-                        rightFrame.setPageLsn(newNsn);
-                        ctx.interiorFrame.setPageNsn(newNsn);
-                        ctx.interiorFrame.setPageLsn(newNsn);
-                    } else {
-                        splitsByLevel[0]++; // debug
-                        rightFrame = (IRTreeFrame) leafFrameFactory.createFrame();
-                        rightFrame.setPage(rightNode);
-                        rightFrame.initBuffer((byte) 0);
-                        rightFrame.setPageTupleFieldCount(cmp.getFieldCount());
-                        ret = ctx.leafFrame.split(rightFrame, tuple, cmp, ctx.splitKey);
-                        ctx.leafFrame.setRightPage(rightPageId);
-                        rightFrame.setPageNsn(ctx.leafFrame.getPageNsn());
-                        incrementGlobalNsn();
-                        int newNsn = getGlobalNsn();
-                        rightFrame.setPageLsn(newNsn);
-                        ctx.leafFrame.setPageNsn(newNsn);
-                        ctx.leafFrame.setPageLsn(newNsn);
-                    }
-                    if (ret != 0) {
-                        ctx.splitKey.reset();
-                    } else {
-                        ctx.splitKey.setPages(pageId, rightPageId);
-                    }
-                    if (pageId == rootPage) {
-                        rootSplits++; // debug
-                        splitsByLevel[currentLevel]++;
-                        currentLevel++;
+		case INSUFFICIENT_SPACE: {
+			int rightPageId = freePageManager.getFreePage(ctx.metaFrame);
+			ICachedPage rightNode = bufferCache
+					.pin(BufferedFileHandle.getDiskPageId(fileId, rightPageId),
+							true);
+			incrementPins();
+			rightNode.acquireWriteLatch();
+			incrementWriteLatchesAcquired();
 
-                        int newLeftId = freePageManager.getFreePage(ctx.metaFrame);
-                        ICachedPage newLeftNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, newLeftId),
-                                true);
-                        incrementPins();
-                        newLeftNode.acquireWriteLatch();
-                        incrementWriteLatchesAcquired();
-                        try {
-                            // copy left child to new left child
-                            System.arraycopy(node.getBuffer().array(), 0, newLeftNode.getBuffer().array(), 0,
-                                    newLeftNode.getBuffer().capacity());
+			try {
+				IRTreeFrame rightFrame;
+				int ret;
+				numOfPages++; // debug
+				if (!isLeaf) {
+					splitsByLevel[ctx.interiorFrame.getLevel()]++; // debug
+					rightFrame = (IRTreeFrame) interiorFrameFactory
+							.createFrame();
+					rightFrame.setPage(rightNode);
+					rightFrame.initBuffer((byte) ctx.interiorFrame.getLevel());
+					rightFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
+					ret = ctx.interiorFrame.split(rightFrame, tuple, cmp,
+							ctx.splitKey);
+					ctx.interiorFrame.setRightPage(rightPageId);
+					rightFrame.setPageNsn(ctx.interiorFrame.getPageNsn());
+					incrementGlobalNsn();
+					int newNsn = getGlobalNsn();
+					rightFrame.setPageLsn(newNsn);
+					ctx.interiorFrame.setPageNsn(newNsn);
+					ctx.interiorFrame.setPageLsn(newNsn);
+				} else {
+					splitsByLevel[0]++; // debug
+					rightFrame = (IRTreeFrame) leafFrameFactory.createFrame();
+					rightFrame.setPage(rightNode);
+					rightFrame.initBuffer((byte) 0);
+					rightFrame.setPageTupleFieldCount(cmp.getFieldCount());
+					ret = ctx.leafFrame.split(rightFrame, tuple, cmp,
+							ctx.splitKey);
+					ctx.leafFrame.setRightPage(rightPageId);
+					rightFrame.setPageNsn(ctx.leafFrame.getPageNsn());
+					incrementGlobalNsn();
+					int newNsn = getGlobalNsn();
+					rightFrame.setPageLsn(newNsn);
+					ctx.leafFrame.setPageNsn(newNsn);
+					ctx.leafFrame.setPageLsn(newNsn);
+				}
+				if (ret != 0) {
+					ctx.splitKey.reset();
+				} else {
+					ctx.splitKey.setPages(pageId, rightPageId);
+				}
+				if (pageId == rootPage) {
+					rootSplits++; // debug
+					splitsByLevel[currentLevel]++;
+					currentLevel++;
 
-                            // initialize new root (leftNode becomes new root)
-                            ctx.interiorFrame.setPage(node);
-                            ctx.interiorFrame.initBuffer((byte) (ctx.interiorFrame.getLevel() + 1));
+					int newLeftId = freePageManager.getFreePage(ctx.metaFrame);
+					ICachedPage newLeftNode = bufferCache
+							.pin(BufferedFileHandle.getDiskPageId(fileId,
+									newLeftId), true);
+					incrementPins();
+					newLeftNode.acquireWriteLatch();
+					incrementWriteLatchesAcquired();
+					try {
+						// copy left child to new left child
+						System.arraycopy(node.getBuffer().array(), 0,
+								newLeftNode.getBuffer().array(), 0, newLeftNode
+										.getBuffer().capacity());
+
+						// initialize new root (leftNode becomes new root)
+						ctx.interiorFrame.setPage(node);
+						ctx.interiorFrame.initBuffer((byte) (ctx.interiorFrame
+								.getLevel() + 1));
 
-                            ctx.splitKey.setLeftPage(newLeftId);
+						ctx.splitKey.setLeftPage(newLeftId);
 
-                            ctx.interiorFrame.insert(ctx.splitKey.getLeftTuple(), cmp, -1);
-                            ctx.interiorFrame.insert(ctx.splitKey.getRightTuple(), cmp, -1);
+						ctx.interiorFrame.insert(ctx.splitKey.getLeftTuple(),
+								cmp, -1);
+						ctx.interiorFrame.insert(ctx.splitKey.getRightTuple(),
+								cmp, -1);
 
-                            incrementGlobalNsn();
-                            int newNsn = getGlobalNsn();
-                            ctx.interiorFrame.setPageLsn(newNsn);
-                            ctx.interiorFrame.setPageNsn(newNsn);
-                        } finally {
-                            newLeftNode.releaseWriteLatch();
-                            incrementWriteLatchesReleased();
-                            bufferCache.unpin(newLeftNode);
-                            incrementUnpins();
-                        }
+						incrementGlobalNsn();
+						int newNsn = getGlobalNsn();
+						ctx.interiorFrame.setPageLsn(newNsn);
+						ctx.interiorFrame.setPageNsn(newNsn);
+					} finally {
+						newLeftNode.releaseWriteLatch();
+						incrementWriteLatchesReleased();
+						bufferCache.unpin(newLeftNode);
+						incrementUnpins();
+					}
 
-                        ctx.splitKey.reset();
-                    }
-                } finally {
-                    rightNode.releaseWriteLatch();
-                    incrementWriteLatchesReleased();
-                    bufferCache.unpin(rightNode);
-                    incrementUnpins();
-                }
-                break;
-            }
-        }
-    }
+					ctx.splitKey.reset();
+				}
+			} finally {
+				rightNode.releaseWriteLatch();
+				incrementWriteLatchesReleased();
+				bufferCache.unpin(rightNode);
+				incrementUnpins();
+			}
+			break;
+		}
+		}
+	}
 
-    public void updateParentForInsert(RTreeOpContext ctx) throws Exception {
-        int parentId = ctx.pathList.getLastPageId();
-        ICachedPage parentNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, parentId), false);
-        incrementPins();
-        parentNode.acquireWriteLatch();
-        incrementWriteLatchesAcquired();
-        ctx.interiorFrame.setPage(parentNode);
-        boolean foundParent = true;
+	public void updateParentForInsert(RTreeOpContext ctx) throws Exception {
+		int parentId = ctx.pathList.getLastPageId();
+		ICachedPage parentNode = bufferCache.pin(
+				BufferedFileHandle.getDiskPageId(fileId, parentId), false);
+		incrementPins();
+		parentNode.acquireWriteLatch();
+		incrementWriteLatchesAcquired();
+		ctx.interiorFrame.setPage(parentNode);
+		boolean foundParent = true;
 
-        if (ctx.interiorFrame.getPageLsn() != ctx.pathList.getLastPageLsn()) {
-            foundParent = false;
-            while (true) {
-                if (ctx.interiorFrame.findTupleByPointer(ctx.splitKey.getLeftTuple(), cmp) != -1) {
-                    // found the parent
-                    foundParent = true;
-                    break;
-                }
-                int rightPage = ctx.interiorFrame.getRightPage();
-                parentNode.releaseWriteLatch();
-                incrementWriteLatchesReleased();
-                bufferCache.unpin(parentNode);
-                incrementUnpins();
+		if (ctx.interiorFrame.getPageLsn() != ctx.pathList.getLastPageLsn()) {
+			foundParent = false;
+			while (true) {
+				if (ctx.interiorFrame.findTupleByPointer(
+						ctx.splitKey.getLeftTuple(), cmp) != -1) {
+					// found the parent
+					foundParent = true;
+					break;
+				}
+				int rightPage = ctx.interiorFrame.getRightPage();
+				parentNode.releaseWriteLatch();
+				incrementWriteLatchesReleased();
+				bufferCache.unpin(parentNode);
+				incrementUnpins();
 
-                if (rightPage == -1) {
-                    break;
-                }
+				if (rightPage == -1) {
+					break;
+				}
 
-                parentId = rightPage;
-                parentNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, parentId), false);
-                incrementPins();
-                parentNode.acquireWriteLatch();
-                incrementWriteLatchesAcquired();
-                ctx.interiorFrame.setPage(parentNode);
-            }
-        }
-        if (foundParent) {
-            ctx.interiorFrame.adjustKey(ctx.splitKey.getLeftTuple(), -1, cmp);
-            insertTuple(parentNode, parentId, ctx.splitKey.getRightTuple(), ctx, ctx.interiorFrame.isLeaf());
-            ctx.pathList.moveLast();
+				parentId = rightPage;
+				parentNode = bufferCache.pin(
+						BufferedFileHandle.getDiskPageId(fileId, parentId),
+						false);
+				incrementPins();
+				parentNode.acquireWriteLatch();
+				incrementWriteLatchesAcquired();
+				ctx.interiorFrame.setPage(parentNode);
+			}
+		}
+		if (foundParent) {
+			ctx.interiorFrame.adjustKey(ctx.splitKey.getLeftTuple(), -1, cmp);
+			insertTuple(parentNode, parentId, ctx.splitKey.getRightTuple(),
+					ctx, ctx.interiorFrame.isLeaf());
+			ctx.pathList.moveLast();
 
-            parentNode.releaseWriteLatch();
-            incrementWriteLatchesReleased();
-            bufferCache.unpin(parentNode);
-            incrementUnpins();
-            return;
-        }
+			parentNode.releaseWriteLatch();
+			incrementWriteLatchesReleased();
+			bufferCache.unpin(parentNode);
+			incrementUnpins();
+			return;
+		}
 
-        // very rare situation when the there is a root split, do an exhaustive
-        // breadth-first traversal looking for the parent tuple
+		// Very rare situation: when there is a root split, do an exhaustive
+		// breadth-first traversal looking for the parent tuple
 
-        ctx.pathList.clear();
-        ctx.traverseList.clear();
-        findPath(ctx);
-        updateParentForInsert(ctx);
-    }
+		ctx.pathList.clear();
+		ctx.traverseList.clear();
+		findPath(ctx);
+		updateParentForInsert(ctx);
+	}
 
-    public void findPath(RTreeOpContext ctx) throws Exception {
-        int pageId = rootPage;
-        int parentIndex = -1;
-        int parentLsn = 0;
-        int pageLsn, pageIndex;
-        ctx.traverseList.add(pageId, -1, parentIndex);
-        while (!ctx.traverseList.isLast()) {
-            pageId = ctx.traverseList.getFirstPageId();
-            parentIndex = ctx.traverseList.getFirstPageIndex();
+	public void findPath(RTreeOpContext ctx) throws Exception {
+		int pageId = rootPage;
+		int parentIndex = -1;
+		int parentLsn = 0;
+		int pageLsn, pageIndex;
+		ctx.traverseList.add(pageId, -1, parentIndex);
+		while (!ctx.traverseList.isLast()) {
+			pageId = ctx.traverseList.getFirstPageId();
+			parentIndex = ctx.traverseList.getFirstPageIndex();
 
-            ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-            incrementPins();
-            node.acquireReadLatch();
-            incrementReadLatchesAcquired();
-            ctx.interiorFrame.setPage(node);
-            pageLsn = ctx.interiorFrame.getPageLsn();
-            pageIndex = ctx.traverseList.first();
-            ctx.traverseList.setPageLsn(pageIndex, pageLsn);
+			ICachedPage node = bufferCache.pin(
+					BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+			incrementPins();
+			node.acquireReadLatch();
+			incrementReadLatchesAcquired();
+			ctx.interiorFrame.setPage(node);
+			pageLsn = ctx.interiorFrame.getPageLsn();
+			pageIndex = ctx.traverseList.first();
+			ctx.traverseList.setPageLsn(pageIndex, pageLsn);
 
-            ctx.traverseList.moveFirst();
+			ctx.traverseList.moveFirst();
 
-            if (pageId != rootPage && parentLsn < ctx.interiorFrame.getPageNsn()) {
-                int rightPage = ctx.interiorFrame.getRightPage();
-                if (rightPage != -1) {
-                    ctx.traverseList.add(rightPage, -1, parentIndex);
-                }
-            }
-            parentLsn = pageLsn;
+			if (pageId != rootPage
+					&& parentLsn < ctx.interiorFrame.getPageNsn()) {
+				int rightPage = ctx.interiorFrame.getRightPage();
+				if (rightPage != -1) {
+					ctx.traverseList.add(rightPage, -1, parentIndex);
+				}
+			}
+			parentLsn = pageLsn;
 
-            if (ctx.interiorFrame.findTupleByPointer(ctx.splitKey.getLeftTuple(), ctx.traverseList, pageIndex, cmp) != -1) {
-                fillPath(ctx, pageIndex);
+			if (ctx.interiorFrame.findTupleByPointer(
+					ctx.splitKey.getLeftTuple(), ctx.traverseList, pageIndex,
+					cmp) != -1) {
+				fillPath(ctx, pageIndex);
 
-                node.releaseReadLatch();
-                incrementReadLatchesReleased();
-                bufferCache.unpin(node);
-                incrementUnpins();
-                return;
-            }
-            node.releaseReadLatch();
-            incrementReadLatchesReleased();
-            bufferCache.unpin(node);
-            incrementUnpins();
-        }
-    }
+				node.releaseReadLatch();
+				incrementReadLatchesReleased();
+				bufferCache.unpin(node);
+				incrementUnpins();
+				return;
+			}
+			node.releaseReadLatch();
+			incrementReadLatchesReleased();
+			bufferCache.unpin(node);
+			incrementUnpins();
+		}
+	}
 
-    public void fillPath(RTreeOpContext ctx, int pageIndex) throws Exception {
-        if (pageIndex != -1) {
-            fillPath(ctx, ctx.traverseList.getPageIndex(pageIndex));
-            ctx.pathList.add(ctx.traverseList.getPageId(pageIndex), ctx.traverseList.getPageLsn(pageIndex), -1);
-        }
-    }
+	public void fillPath(RTreeOpContext ctx, int pageIndex) throws Exception {
+		if (pageIndex != -1) {
+			fillPath(ctx, ctx.traverseList.getPageIndex(pageIndex));
+			ctx.pathList.add(ctx.traverseList.getPageId(pageIndex),
+					ctx.traverseList.getPageLsn(pageIndex), -1);
+		}
+	}
 
-    @Override
-    public void delete(ITupleReference tuple, IndexOpContext ictx) throws Exception {
-        RTreeOpContext ctx = (RTreeOpContext) ictx;
-        ctx.reset();
-        ctx.setTuple(tuple);
-        ctx.splitKey.reset();
-        ctx.splitKey.getLeftTuple().setFieldCount(cmp.getKeyFieldCount());
-        ctx.interiorFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
-        ctx.leafFrame.setPageTupleFieldCount(cmp.getFieldCount());
+	@Override
+	public void delete(ITupleReference tuple, IndexOpContext ictx)
+			throws Exception {
+		RTreeOpContext ctx = (RTreeOpContext) ictx;
+		ctx.reset();
+		ctx.setTuple(tuple);
+		ctx.splitKey.reset();
+		ctx.splitKey.getLeftTuple().setFieldCount(cmp.getKeyFieldCount());
+		ctx.interiorFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
+		ctx.leafFrame.setPageTupleFieldCount(cmp.getFieldCount());
 
-        int tupleIndex = findTupleToDelete(ctx);
+		int tupleIndex = findTupleToDelete(ctx);
 
-        if (tupleIndex != -1) {
-            int pageId = ctx.pathList.getLastPageId();
-            ctx.pathList.moveLast();
-            deleteTuple(pageId, tupleIndex, ctx);
+		if (tupleIndex != -1) {
+			int pageId = ctx.pathList.getLastPageId();
+			ctx.pathList.moveLast();
+			deleteTuple(pageId, tupleIndex, ctx);
 
-            while (true) {
-                if (ctx.splitKey.getLeftPageBuffer() != null) {
-                    updateParentForDelete(ctx);
-                } else {
-                    break;
-                }
-            }
+			while (true) {
+				if (ctx.splitKey.getLeftPageBuffer() != null) {
+					updateParentForDelete(ctx);
+				} else {
+					break;
+				}
+			}
 
-            ctx.leafFrame.getPage().releaseWriteLatch();
-            incrementWriteLatchesReleased();
-            bufferCache.unpin(ctx.leafFrame.getPage());
-            incrementUnpins();
-        }
-    }
+			ctx.leafFrame.getPage().releaseWriteLatch();
+			incrementWriteLatchesReleased();
+			bufferCache.unpin(ctx.leafFrame.getPage());
+			incrementUnpins();
+		}
+	}
 
-    public void updateParentForDelete(RTreeOpContext ctx) throws Exception {
-        int parentId = ctx.pathList.getLastPageId();
-        ICachedPage parentNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, parentId), false);
-        incrementPins();
-        parentNode.acquireWriteLatch();
-        incrementWriteLatchesAcquired();
-        ctx.interiorFrame.setPage(parentNode);
-        boolean foundParent = true;
-        int tupleIndex = -1;
+	public void updateParentForDelete(RTreeOpContext ctx) throws Exception {
+		int parentId = ctx.pathList.getLastPageId();
+		ICachedPage parentNode = bufferCache.pin(
+				BufferedFileHandle.getDiskPageId(fileId, parentId), false);
+		incrementPins();
+		parentNode.acquireWriteLatch();
+		incrementWriteLatchesAcquired();
+		ctx.interiorFrame.setPage(parentNode);
+		boolean foundParent = true;
+		int tupleIndex = -1;
 
-        if (ctx.interiorFrame.getPageLsn() != ctx.pathList.getLastPageLsn()) {
-            foundParent = false;
-            while (true) {
-                tupleIndex = ctx.interiorFrame.findTupleByPointer(ctx.splitKey.getLeftTuple(), cmp);
-                if (tupleIndex != -1) {
-                    // found the parent
-                    foundParent = true;
-                    break;
-                }
-                int rightPage = ctx.interiorFrame.getRightPage();
-                parentNode.releaseWriteLatch();
-                incrementWriteLatchesReleased();
-                bufferCache.unpin(parentNode);
-                incrementUnpins();
+		if (ctx.interiorFrame.getPageLsn() != ctx.pathList.getLastPageLsn()) {
+			foundParent = false;
+			while (true) {
+				tupleIndex = ctx.interiorFrame.findTupleByPointer(
+						ctx.splitKey.getLeftTuple(), cmp);
+				if (tupleIndex != -1) {
+					// found the parent
+					foundParent = true;
+					break;
+				}
+				int rightPage = ctx.interiorFrame.getRightPage();
+				parentNode.releaseWriteLatch();
+				incrementWriteLatchesReleased();
+				bufferCache.unpin(parentNode);
+				incrementUnpins();
 
-                if (rightPage == -1) {
-                    break;
-                }
+				if (rightPage == -1) {
+					break;
+				}
 
-                parentId = rightPage;
-                parentNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, parentId), false);
-                incrementPins();
-                parentNode.acquireWriteLatch();
-                incrementWriteLatchesAcquired();
-                ctx.interiorFrame.setPage(parentNode);
-            }
-        }
-        if (foundParent) {
-            if (tupleIndex == -1) {
-                tupleIndex = ctx.interiorFrame.findTupleByPointer(ctx.splitKey.getLeftTuple(), cmp);
-            }
-            boolean recomputeMBR = ctx.interiorFrame.recomputeMBR(ctx.splitKey.getLeftTuple(), tupleIndex, cmp);
+				parentId = rightPage;
+				parentNode = bufferCache.pin(
+						BufferedFileHandle.getDiskPageId(fileId, parentId),
+						false);
+				incrementPins();
+				parentNode.acquireWriteLatch();
+				incrementWriteLatchesAcquired();
+				ctx.interiorFrame.setPage(parentNode);
+			}
+		}
+		if (foundParent) {
+			if (tupleIndex == -1) {
+				tupleIndex = ctx.interiorFrame.findTupleByPointer(
+						ctx.splitKey.getLeftTuple(), cmp);
+			}
+			boolean recomputeMBR = ctx.interiorFrame.recomputeMBR(
+					ctx.splitKey.getLeftTuple(), tupleIndex, cmp);
 
-            if (recomputeMBR) {
-                ctx.interiorFrame.adjustKey(ctx.splitKey.getLeftTuple(), tupleIndex, cmp);
-                ctx.pathList.moveLast();
+			if (recomputeMBR) {
+				ctx.interiorFrame.adjustKey(ctx.splitKey.getLeftTuple(),
+						tupleIndex, cmp);
+				ctx.pathList.moveLast();
 
-                incrementGlobalNsn();
-                ctx.interiorFrame.setPageLsn(getGlobalNsn());
+				incrementGlobalNsn();
+				ctx.interiorFrame.setPageLsn(getGlobalNsn());
 
-                ctx.splitKey.reset();
-                if (!ctx.pathList.isEmpty()) {
-                    ctx.interiorFrame.computeMBR(ctx.splitKey, cmp);
-                    ctx.splitKey.setLeftPage(parentId);
-                }
-            } else {
-                ctx.pathList.moveLast();
-                ctx.splitKey.reset();
-            }
+				ctx.splitKey.reset();
+				if (!ctx.pathList.isEmpty()) {
+					ctx.interiorFrame.computeMBR(ctx.splitKey, cmp);
+					ctx.splitKey.setLeftPage(parentId);
+				}
+			} else {
+				ctx.pathList.moveLast();
+				ctx.splitKey.reset();
+			}
 
-            parentNode.releaseWriteLatch();
-            incrementWriteLatchesReleased();
-            bufferCache.unpin(parentNode);
-            incrementUnpins();
-            return;
-        }
+			parentNode.releaseWriteLatch();
+			incrementWriteLatchesReleased();
+			bufferCache.unpin(parentNode);
+			incrementUnpins();
+			return;
+		}
 
-        // very rare situation when the there is a root split, do an exhaustive
-        // breadth-first traversal looking for the parent tuple
+		// very rare situation when there is a root split, do an exhaustive
+		// breadth-first traversal looking for the parent tuple
 
-        ctx.pathList.clear();
-        ctx.traverseList.clear();
-        findPath(ctx);
-        updateParentForDelete(ctx);
-    }
+		ctx.pathList.clear();
+		ctx.traverseList.clear();
+		findPath(ctx);
+		updateParentForDelete(ctx);
+	}
 
-    public int findTupleToDelete(RTreeOpContext ctx) throws Exception {
+	public int findTupleToDelete(RTreeOpContext ctx) throws Exception {
 
-        ctx.traverseList.add(rootPage, -1, -1);
-        ctx.pathList.add(rootPage, -1, ctx.traverseList.size() - 1);
+		ctx.traverseList.add(rootPage, -1, -1);
+		ctx.pathList.add(rootPage, -1, ctx.traverseList.size() - 1);
 
-        while (!ctx.pathList.isEmpty()) {
-            int pageId = ctx.pathList.getLastPageId();
-            int parentLsn = ctx.pathList.getLastPageLsn();
-            int pageIndex = ctx.pathList.getLastPageIndex();
-            ctx.pathList.moveLast();
-            ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-            incrementPins();
-            node.acquireReadLatch();
-            incrementReadLatchesAcquired();
-            ctx.interiorFrame.setPage(node);
-            boolean isLeaf = ctx.interiorFrame.isLeaf();
-            int pageLsn = ctx.interiorFrame.getPageLsn();
-            int parentIndex = ctx.traverseList.getPageIndex(pageIndex);
-            ctx.traverseList.setPageLsn(pageIndex, pageLsn);
+		while (!ctx.pathList.isEmpty()) {
+			int pageId = ctx.pathList.getLastPageId();
+			int parentLsn = ctx.pathList.getLastPageLsn();
+			int pageIndex = ctx.pathList.getLastPageIndex();
+			ctx.pathList.moveLast();
+			ICachedPage node = bufferCache.pin(
+					BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+			incrementPins();
+			node.acquireReadLatch();
+			incrementReadLatchesAcquired();
+			ctx.interiorFrame.setPage(node);
+			boolean isLeaf = ctx.interiorFrame.isLeaf();
+			int pageLsn = ctx.interiorFrame.getPageLsn();
+			int parentIndex = ctx.traverseList.getPageIndex(pageIndex);
+			ctx.traverseList.setPageLsn(pageIndex, pageLsn);
 
-            if (pageId != rootPage && parentLsn < ctx.interiorFrame.getPageNsn()) {
-                // Concurrent split detected, we need to visit the right page
-                int rightPage = ctx.interiorFrame.getRightPage();
-                if (rightPage != -1) {
-                    ctx.traverseList.add(rightPage, -1, parentIndex);
-                    ctx.pathList.add(rightPage, parentLsn, ctx.traverseList.size() - 1);
-                }
-            }
+			if (pageId != rootPage
+					&& parentLsn < ctx.interiorFrame.getPageNsn()) {
+				// Concurrent split detected; we need to visit the right page
+				int rightPage = ctx.interiorFrame.getRightPage();
+				if (rightPage != -1) {
+					ctx.traverseList.add(rightPage, -1, parentIndex);
+					ctx.pathList.add(rightPage, parentLsn,
+							ctx.traverseList.size() - 1);
+				}
+			}
 
-            if (!isLeaf) {
-                for (int i = 0; i < ctx.interiorFrame.getTupleCount(); i++) {
-                    int childPageId = ctx.interiorFrame.getChildPageIdIfIntersect(ctx.tuple, i, cmp);
-                    if (childPageId != -1) {
-                        ctx.traverseList.add(childPageId, -1, pageIndex);
-                        ctx.pathList.add(childPageId, pageLsn, ctx.traverseList.size() - 1);
-                    }
-                }
-            } else {
-                ctx.leafFrame.setPage(node);
-                int tupleIndex = ctx.leafFrame.findTupleIndex(ctx.tuple, cmp);
-                if (tupleIndex != -1) {
+			if (!isLeaf) {
+				for (int i = 0; i < ctx.interiorFrame.getTupleCount(); i++) {
+					int childPageId = ctx.interiorFrame
+							.getChildPageIdIfIntersect(ctx.tuple, i, cmp);
+					if (childPageId != -1) {
+						ctx.traverseList.add(childPageId, -1, pageIndex);
+						ctx.pathList.add(childPageId, pageLsn,
+								ctx.traverseList.size() - 1);
+					}
+				}
+			} else {
+				ctx.leafFrame.setPage(node);
+				int tupleIndex = ctx.leafFrame.findTupleIndex(ctx.tuple, cmp);
+				if (tupleIndex != -1) {
 
-                    node.releaseReadLatch();
-                    incrementReadLatchesReleased();
-                    bufferCache.unpin(node);
-                    incrementUnpins();
+					node.releaseReadLatch();
+					incrementReadLatchesReleased();
+					bufferCache.unpin(node);
+					incrementUnpins();
 
-                    node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-                    incrementPins();
-                    node.acquireWriteLatch();
-                    incrementWriteLatchesAcquired();
-                    ctx.leafFrame.setPage(node);
+					node = bufferCache.pin(
+							BufferedFileHandle.getDiskPageId(fileId, pageId),
+							false);
+					incrementPins();
+					node.acquireWriteLatch();
+					incrementWriteLatchesAcquired();
+					ctx.leafFrame.setPage(node);
 
-                    if (ctx.leafFrame.getPageLsn() != pageLsn) {
-                        // The page was changed while we unlocked it
+					if (ctx.leafFrame.getPageLsn() != pageLsn) {
+						// The page was changed while we unlocked it
 
-                        tupleIndex = ctx.leafFrame.findTupleIndex(ctx.tuple, cmp);
-                        if (tupleIndex == -1) {
-                            ctx.traverseList.add(pageId, -1, parentIndex);
-                            ctx.pathList.add(pageId, parentLsn, ctx.traverseList.size() - 1);
+						tupleIndex = ctx.leafFrame.findTupleIndex(ctx.tuple,
+								cmp);
+						if (tupleIndex == -1) {
+							ctx.traverseList.add(pageId, -1, parentIndex);
+							ctx.pathList.add(pageId, parentLsn,
+									ctx.traverseList.size() - 1);
 
-                            node.releaseWriteLatch();
-                            incrementWriteLatchesReleased();
-                            bufferCache.unpin(node);
-                            incrementUnpins();
-                            continue;
-                        } else {
-                            ctx.pathList.clear();
-                            fillPath(ctx, pageIndex);
-                            return tupleIndex;
-                        }
-                    } else {
-                        ctx.pathList.clear();
-                        fillPath(ctx, pageIndex);
-                        return tupleIndex;
-                    }
-                }
-            }
-            node.releaseReadLatch();
-            incrementReadLatchesReleased();
-            bufferCache.unpin(node);
-            incrementUnpins();
-        }
-        return -1;
-    }
+							node.releaseWriteLatch();
+							incrementWriteLatchesReleased();
+							bufferCache.unpin(node);
+							incrementUnpins();
+							continue;
+						} else {
+							ctx.pathList.clear();
+							fillPath(ctx, pageIndex);
+							return tupleIndex;
+						}
+					} else {
+						ctx.pathList.clear();
+						fillPath(ctx, pageIndex);
+						return tupleIndex;
+					}
+				}
+			}
+			node.releaseReadLatch();
+			incrementReadLatchesReleased();
+			bufferCache.unpin(node);
+			incrementUnpins();
+		}
+		return -1;
+	}
 
-    public void deleteTuple(int pageId, int tupleIndex, RTreeOpContext ctx) throws Exception {
-        ctx.leafFrame.delete(tupleIndex, cmp);
-        incrementGlobalNsn();
-        ctx.leafFrame.setPageLsn(getGlobalNsn());
+	public void deleteTuple(int pageId, int tupleIndex, RTreeOpContext ctx)
+			throws Exception {
+		ctx.leafFrame.delete(tupleIndex, cmp);
+		incrementGlobalNsn();
+		ctx.leafFrame.setPageLsn(getGlobalNsn());
 
-        // if the page is empty, just leave it there for future inserts
-        if (pageId != rootPage && ctx.leafFrame.getTupleCount() > 0) {
-            ctx.leafFrame.computeMBR(ctx.splitKey, cmp);
-            ctx.splitKey.setLeftPage(pageId);
-        }
-    }
+		// if the page is empty, just leave it there for future inserts
+		if (pageId != rootPage && ctx.leafFrame.getTupleCount() > 0) {
+			ctx.leafFrame.computeMBR(ctx.splitKey, cmp);
+			ctx.splitKey.setLeftPage(pageId);
+		}
+	}
 
-    public void search(ITreeIndexCursor cursor, SearchPredicate pred, RTreeOpContext ctx) throws Exception {
-        ctx.reset();
-        ctx.cursor = cursor;
+	public void search(ITreeIndexCursor cursor, SearchPredicate pred,
+			RTreeOpContext ctx) throws Exception {
+		ctx.reset();
+		ctx.cursor = cursor;
 
-        cursor.setBufferCache(bufferCache);
-        cursor.setFileId(fileId);
-        ctx.cursorInitialState.setRootPage(rootPage);
-        ctx.cursor.open(ctx.cursorInitialState, pred);
-    }
+		cursor.setBufferCache(bufferCache);
+		cursor.setFileId(fileId);
+		ctx.cursorInitialState.setRootPage(rootPage);
+		ctx.cursor.open(ctx.cursorInitialState, pred);
+	}
 
-    public ITreeIndexFrameFactory getInteriorFrameFactory() {
-        return interiorFrameFactory;
-    }
+	public ITreeIndexFrameFactory getInteriorFrameFactory() {
+		return interiorFrameFactory;
+	}
 
-    public ITreeIndexFrameFactory getLeafFrameFactory() {
-        return leafFrameFactory;
-    }
+	public ITreeIndexFrameFactory getLeafFrameFactory() {
+		return leafFrameFactory;
+	}
 
-    public MultiComparator getCmp() {
-        return cmp;
-    }
+	public MultiComparator getCmp() {
+		return cmp;
+	}
 
-    public IFreePageManager getFreePageManager() {
-        return freePageManager;
-    }
+	public IFreePageManager getFreePageManager() {
+		return freePageManager;
+	}
 
-    @Override
-    public void update(ITupleReference tuple, IndexOpContext ictx) throws Exception {
-        throw new Exception("RTree Update not implemented.");
-    }
+	@Override
+	public void update(ITupleReference tuple, IndexOpContext ictx)
+			throws Exception {
+		throw new Exception("RTree Update not implemented.");
+	}
 
-    public final class BulkLoadContext implements IIndexBulkLoadContext {
+	public final class BulkLoadContext implements IIndexBulkLoadContext {
 
-        public RTreeOpContext insertOpCtx;
+		public RTreeOpContext insertOpCtx;
 
-        public BulkLoadContext(float fillFactor, IRTreeFrame leafFrame, IRTreeFrame interiorFrame,
-                ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
+		public BulkLoadContext(float fillFactor, IRTreeFrame leafFrame,
+				IRTreeFrame interiorFrame, ITreeIndexMetaDataFrame metaFrame)
+				throws HyracksDataException {
 
-            insertOpCtx = createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
-        }
-    }
+			insertOpCtx = createOpContext(IndexOp.INSERT, leafFrame,
+					interiorFrame, metaFrame);
+		}
+	}
 
-    @Override
-    public IIndexBulkLoadContext beginBulkLoad(float fillFactor, ITreeIndexFrame leafFrame,
-            ITreeIndexFrame interiorFrame, ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
-        if (loaded)
-            throw new HyracksDataException("Trying to bulk-load RTree but has RTree already been loaded.");
+	@Override
+	public IIndexBulkLoadContext beginBulkLoad(float fillFactor,
+			ITreeIndexFrame leafFrame, ITreeIndexFrame interiorFrame,
+			ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
+		if (loaded)
+			throw new HyracksDataException(
+					"Trying to bulk-load RTree but RTree has already been loaded.");
 
-        BulkLoadContext ctx = new BulkLoadContext(fillFactor, (IRTreeFrame) leafFrame, (IRTreeFrame) interiorFrame,
-                metaFrame);
-        return ctx;
-    }
+		BulkLoadContext ctx = new BulkLoadContext(fillFactor,
+				(IRTreeFrame) leafFrame, (IRTreeFrame) interiorFrame, metaFrame);
+		return ctx;
+	}
 
-    @Override
-    public void bulkLoadAddTuple(IIndexBulkLoadContext ictx, ITupleReference tuple) throws HyracksDataException {
-        try {
-            insert(tuple, ((BulkLoadContext) ictx).insertOpCtx);
-        } catch (Exception e) {
-            throw new HyracksDataException("BulkLoad Error");
-        }
-    }
+	@Override
+	public void bulkLoadAddTuple(IIndexBulkLoadContext ictx,
+			ITupleReference tuple) throws HyracksDataException {
+		try {
+			insert(tuple, ((BulkLoadContext) ictx).insertOpCtx);
+		} catch (Exception e) {
+			throw new HyracksDataException("BulkLoad Error");
+		}
+	}
 
-    @Override
-    public void endBulkLoad(IIndexBulkLoadContext ictx) throws HyracksDataException {
-        loaded = true;
-    }
+	@Override
+	public void endBulkLoad(IIndexBulkLoadContext ictx)
+			throws HyracksDataException {
+		loaded = true;
+	}
 
-    @Override
-    public void diskOrderScan(ITreeIndexCursor icursor, ITreeIndexFrame leafFrame, ITreeIndexMetaDataFrame metaFrame,
-            IndexOpContext ictx) throws HyracksDataException {
-        TreeDiskOrderScanCursor cursor = (TreeDiskOrderScanCursor) icursor;
-        RTreeOpContext ctx = (RTreeOpContext) ictx;
-        ctx.reset();
+	@Override
+	public void diskOrderScan(ITreeIndexCursor icursor,
+			ITreeIndexFrame leafFrame, ITreeIndexMetaDataFrame metaFrame,
+			IndexOpContext ictx) throws HyracksDataException {
+		TreeDiskOrderScanCursor cursor = (TreeDiskOrderScanCursor) icursor;
+		RTreeOpContext ctx = (RTreeOpContext) ictx;
+		ctx.reset();
 
-        int currentPageId = rootPage + 1;
-        int maxPageId = freePageManager.getMaxPage(metaFrame);
+		int currentPageId = rootPage + 1;
+		int maxPageId = freePageManager.getMaxPage(metaFrame);
 
-        ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, currentPageId), false);
-        page.acquireReadLatch();
-        cursor.setBufferCache(bufferCache);
-        cursor.setFileId(fileId);
-        cursor.setCurrentPageId(currentPageId);
-        cursor.setMaxPageId(maxPageId);
-        ctx.cursorInitialState.setPage(page);
-        cursor.open(ctx.cursorInitialState, diskOrderScanPredicate);
-    }
+		ICachedPage page = bufferCache.pin(
+				BufferedFileHandle.getDiskPageId(fileId, currentPageId), false);
+		page.acquireReadLatch();
+		cursor.setBufferCache(bufferCache);
+		cursor.setFileId(fileId);
+		cursor.setCurrentPageId(currentPageId);
+		cursor.setMaxPageId(maxPageId);
+		ctx.cursorInitialState.setPage(page);
+		cursor.open(ctx.cursorInitialState, diskOrderScanPredicate);
+	}
 
-    @Override
-    public int getRootPageId() {
-        return rootPage;
-    }
+	@Override
+	public int getRootPageId() {
+		return rootPage;
+	}
 
-    @Override
-    public int getFieldCount() {
-        return cmp.getFieldCount();
-    }
+	@Override
+	public int getFieldCount() {
+		return cmp.getFieldCount();
+	}
 
-    @Override
-    public IndexType getIndexType() {
-        return IndexType.RTREE;
-    }
-}
+	@Override
+	public IndexType getIndexType() {
+		return IndexType.RTREE;
+	}
+}
\ No newline at end of file
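
For context on the LSN/NSN checks in findTupleToDelete and updateParentForDelete above: the descent remembers the LSN it read on the parent page, and if a child later reports a page NSN greater than that remembered value, the child was split after the parent was read, so the entry being looked for may have moved to a right sibling. The standalone sketch below mirrors only that detection step; Page, PathEntry and the numeric stamps are simplified, hypothetical stand-ins rather than the Hyracks frame API.

    import java.util.ArrayDeque;
    import java.util.Deque;

    class Page {
        int id;
        long pageLsn;          // last modification stamp of this page
        long pageNsn;          // stamp taken when this page was last split
        int rightSibling = -1;
        Page(int id) { this.id = id; }
    }

    class PathEntry {
        final int pageId;
        final long parentLsn;  // parent's LSN at the time the parent was read
        PathEntry(int pageId, long parentLsn) { this.pageId = pageId; this.parentLsn = parentLsn; }
    }

    public class SplitDetectionSketch {
        public static void main(String[] args) {
            Page parent = new Page(1);
            parent.pageLsn = 10;
            Page child = new Page(2);
            Page rightSibling = new Page(3);

            // The descent records the child together with the parent's current LSN.
            Deque<PathEntry> path = new ArrayDeque<>();
            path.push(new PathEntry(child.id, parent.pageLsn));

            // A concurrent split of the child afterwards bumps its NSN and links a right sibling.
            child.pageNsn = 11;
            child.rightSibling = rightSibling.id;

            // When the child is finally visited, the stale parent LSN reveals the split.
            PathEntry entry = path.pop();
            if (entry.parentLsn < child.pageNsn) {
                System.out.println("split detected; also visit right sibling page " + child.rightSibling);
            }
        }
    }

In the diff above, the same comparison (parentLsn < ctx.interiorFrame.getPageNsn()) is what gates adding getRightPage() to the path list.
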
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeCursorInitialState.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeCursorInitialState.java
index 8b94a04..ac1eb7d 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeCursorInitialState.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeCursorInitialState.java
@@ -20,32 +20,32 @@
 
 public class RTreeCursorInitialState implements ICursorInitialState {
 
-    private PathList pathList;
-    private int rootPage;
-    private ICachedPage page; // for disk order scan
+	private PathList pathList;
+	private int rootPage;
+	private ICachedPage page; // for disk order scan
 
-    public RTreeCursorInitialState(PathList pathList, int rootPage) {
-        this.pathList = pathList;
-        this.rootPage = rootPage;
-    }
+	public RTreeCursorInitialState(PathList pathList, int rootPage) {
+		this.pathList = pathList;
+		this.rootPage = rootPage;
+	}
 
-    public PathList getPathList() {
-        return pathList;
-    }
+	public PathList getPathList() {
+		return pathList;
+	}
 
-    public int getRootPage() {
-        return rootPage;
-    }
+	public int getRootPage() {
+		return rootPage;
+	}
 
-    public void setRootPage(int rootPage) {
-        this.rootPage = rootPage;
-    }
+	public void setRootPage(int rootPage) {
+		this.rootPage = rootPage;
+	}
 
-    public ICachedPage getPage() {
-        return page;
-    }
+	public ICachedPage getPage() {
+		return page;
+	}
 
-    public void setPage(ICachedPage page) {
-        this.page = page;
-    }
+	public void setPage(ICachedPage page) {
+		this.page = page;
+	}
 }
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeOpContext.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeOpContext.java
index ea8af28..fc57019 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeOpContext.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeOpContext.java
@@ -24,51 +24,54 @@
 import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
 
 public final class RTreeOpContext implements IndexOpContext {
-    public final IndexOp op;
-    public final IRTreeInteriorFrame interiorFrame;
-    public final IRTreeLeafFrame leafFrame;
-    public ITreeIndexCursor cursor;
-    public RTreeCursorInitialState cursorInitialState;
-    public final ITreeIndexMetaDataFrame metaFrame;
-    public final RTreeSplitKey splitKey;
-    public ITupleReference tuple;
-    public final PathList pathList; // used to record the pageIds and pageLsns
-                                    // of the visited pages
-    public final PathList traverseList; // used for traversing the tree
-    private static final int initTraverseListSize = 100;
+	public final IndexOp op;
+	public final IRTreeInteriorFrame interiorFrame;
+	public final IRTreeLeafFrame leafFrame;
+	public ITreeIndexCursor cursor;
+	public RTreeCursorInitialState cursorInitialState;
+	public final ITreeIndexMetaDataFrame metaFrame;
+	public final RTreeSplitKey splitKey;
+	public ITupleReference tuple;
+	public final PathList pathList; // used to record the pageIds and pageLsns
+									// of the visited pages
+	public final PathList traverseList; // used for traversing the tree
+	private static final int initTraverseListSize = 100;
 
-    public RTreeOpContext(IndexOp op, IRTreeLeafFrame leafFrame, IRTreeInteriorFrame interiorFrame,
-            ITreeIndexMetaDataFrame metaFrame, int treeHeightHint) {
-        this.op = op;
-        this.interiorFrame = interiorFrame;
-        this.leafFrame = leafFrame;
-        this.metaFrame = metaFrame;
-        pathList = new PathList(treeHeightHint, treeHeightHint);
-        if (op != IndexOp.SEARCH && op != IndexOp.DISKORDERSCAN) {
-            splitKey = new RTreeSplitKey(interiorFrame.getTupleWriter().createTupleReference(), interiorFrame
-                    .getTupleWriter().createTupleReference());
-            traverseList = new PathList(initTraverseListSize, initTraverseListSize);
-        } else {
-            splitKey = null;
-            traverseList = null;
-            cursorInitialState = new RTreeCursorInitialState(pathList, 1);
-        }
-    }
+	public RTreeOpContext(IndexOp op, IRTreeLeafFrame leafFrame,
+			IRTreeInteriorFrame interiorFrame,
+			ITreeIndexMetaDataFrame metaFrame, int treeHeightHint) {
+		this.op = op;
+		this.interiorFrame = interiorFrame;
+		this.leafFrame = leafFrame;
+		this.metaFrame = metaFrame;
+		pathList = new PathList(treeHeightHint, treeHeightHint);
+		if (op != IndexOp.SEARCH && op != IndexOp.DISKORDERSCAN) {
+			splitKey = new RTreeSplitKey(interiorFrame.getTupleWriter()
+					.createTupleReference(), interiorFrame.getTupleWriter()
+					.createTupleReference());
+			traverseList = new PathList(initTraverseListSize,
+					initTraverseListSize);
+		} else {
+			splitKey = null;
+			traverseList = null;
+			cursorInitialState = new RTreeCursorInitialState(pathList, 1);
+		}
+	}
 
-    public ITupleReference getTuple() {
-        return tuple;
-    }
+	public ITupleReference getTuple() {
+		return tuple;
+	}
 
-    public void setTuple(ITupleReference tuple) {
-        this.tuple = tuple;
-    }
+	public void setTuple(ITupleReference tuple) {
+		this.tuple = tuple;
+	}
 
-    public void reset() {
-        if (pathList != null) {
-            pathList.clear();
-        }
-        if (traverseList != null) {
-            traverseList.clear();
-        }
-    }
+	public void reset() {
+		if (pathList != null) {
+			pathList.clear();
+		}
+		if (traverseList != null) {
+			traverseList.clear();
+		}
+	}
 }
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSearchCursor.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSearchCursor.java
index 86a7bfd..9d37d86 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSearchCursor.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSearchCursor.java
@@ -30,172 +30,189 @@
 
 public class RTreeSearchCursor implements ITreeIndexCursor {
 
-    private int fileId = -1;
-    private ICachedPage page = null;
-    private IRTreeInteriorFrame interiorFrame = null;
-    private IRTreeLeafFrame leafFrame = null;
-    private IBufferCache bufferCache = null;
+	private int fileId = -1;
+	private ICachedPage page = null;
+	private IRTreeInteriorFrame interiorFrame = null;
+	private IRTreeLeafFrame leafFrame = null;
+	private IBufferCache bufferCache = null;
 
-    private SearchPredicate pred;
-    private PathList pathList;
-    private int rootPage;
-    ITupleReference searchKey;
+	private SearchPredicate pred;
+	private PathList pathList;
+	private int rootPage;
+	ITupleReference searchKey;
 
-    private int tupleIndex = 0;
-    private int tupleIndexInc = 0;
+	private int tupleIndex = 0;
+	private int tupleIndexInc = 0;
 
-    private MultiComparator cmp;
+	private MultiComparator cmp;
 
-    private ITreeIndexTupleReference frameTuple;
-    private boolean readLatched = false;
+	private ITreeIndexTupleReference frameTuple;
+	private boolean readLatched = false;
 
-    private int pin = 0;
-    private int unpin = 0;
+	private int pin = 0;
+	private int unpin = 0;
 
-    public RTreeSearchCursor(IRTreeInteriorFrame interiorFrame, IRTreeLeafFrame leafFrame) {
-        this.interiorFrame = interiorFrame;
-        this.leafFrame = leafFrame;
-        this.frameTuple = leafFrame.createTupleReference();
-    }
+	public RTreeSearchCursor(IRTreeInteriorFrame interiorFrame,
+			IRTreeLeafFrame leafFrame) {
+		this.interiorFrame = interiorFrame;
+		this.leafFrame = leafFrame;
+		this.frameTuple = leafFrame.createTupleReference();
+	}
 
-    @Override
-    public void close() throws Exception {
-        if (readLatched) {
-            page.releaseReadLatch();
-            bufferCache.unpin(page);
-            readLatched = false;
-        }
-        tupleIndex = 0;
-        tupleIndexInc = 0;
-        page = null;
-        pathList = null;
-    }
+	@Override
+	public void close() throws Exception {
+		if (readLatched) {
+			page.releaseReadLatch();
+			bufferCache.unpin(page);
+			readLatched = false;
+		}
+		tupleIndex = 0;
+		tupleIndexInc = 0;
+		page = null;
+		pathList = null;
+	}
 
-    public ITupleReference getTuple() {
-        return frameTuple;
-    }
+	public ITupleReference getTuple() {
+		return frameTuple;
+	}
 
-    @Override
-    public ICachedPage getPage() {
-        return page;
-    }
+	@Override
+	public ICachedPage getPage() {
+		return page;
+	}
 
-    public boolean fetchNextLeafPage() throws HyracksDataException {
-        if (readLatched) {
-            page.releaseReadLatch();
-            bufferCache.unpin(page);
-            unpin++;
-            readLatched = false;
-        }
-        while (!pathList.isEmpty()) {
-            int pageId = pathList.getLastPageId();
-            int parentLsn = pathList.getLastPageLsn();
-            pathList.moveLast();
-            ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-            pin++;
-            node.acquireReadLatch();
-            readLatched = true;
-            interiorFrame.setPage(node);
-            boolean isLeaf = interiorFrame.isLeaf();
-            int pageLsn = interiorFrame.getPageLsn();
+	public boolean fetchNextLeafPage() throws HyracksDataException {
+		if (readLatched) {
+			page.releaseReadLatch();
+			bufferCache.unpin(page);
+			unpin++;
+			readLatched = false;
+		}
+		while (!pathList.isEmpty()) {
+			int pageId = pathList.getLastPageId();
+			int parentLsn = pathList.getLastPageLsn();
+			pathList.moveLast();
+			ICachedPage node = bufferCache.pin(
+					BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+			pin++;
+			node.acquireReadLatch();
+			readLatched = true;
+			interiorFrame.setPage(node);
+			boolean isLeaf = interiorFrame.isLeaf();
+			int pageLsn = interiorFrame.getPageLsn();
 
-            if (pageId != rootPage && parentLsn < interiorFrame.getPageNsn()) {
-                // Concurrent split detected, we need to visit the right page
-                int rightPage = interiorFrame.getRightPage();
-                if (rightPage != -1) {
-                    pathList.add(rightPage, parentLsn, -1);
-                }
-            }
+			if (pageId != rootPage && parentLsn < interiorFrame.getPageNsn()) {
+				// Concurrent split detected; we need to visit the right page
+				int rightPage = interiorFrame.getRightPage();
+				if (rightPage != -1) {
+					pathList.add(rightPage, parentLsn, -1);
+				}
+			}
 
-            if (!isLeaf) {
-                for (int i = 0; i < interiorFrame.getTupleCount(); i++) {
-                    int childPageId = interiorFrame.getChildPageIdIfIntersect(searchKey, i, cmp);
-                    if (childPageId != -1) {
-                        pathList.add(childPageId, pageLsn, -1);
-                    }
-                }
-            } else {
-                page = node;
-                leafFrame.setPage(page);
-                tupleIndex = 0;
-                return true;
-            }
-            node.releaseReadLatch();
-            readLatched = false;
-            bufferCache.unpin(node);
-            unpin++;
-        }
-        return false;
-    }
+			if (!isLeaf) {
+				for (int i = 0; i < interiorFrame.getTupleCount(); i++) {
+					int childPageId = interiorFrame.getChildPageIdIfIntersect(
+							searchKey, i, cmp);
+					if (childPageId != -1) {
+						pathList.add(childPageId, pageLsn, -1);
+					}
+				}
+			} else {
+				page = node;
+				leafFrame.setPage(page);
+				tupleIndex = 0;
+				return true;
+			}
+			node.releaseReadLatch();
+			readLatched = false;
+			bufferCache.unpin(node);
+			unpin++;
+		}
+		return false;
+	}
 
-    @Override
-    public boolean hasNext() throws Exception {
-        if (page == null) {
-            return false;
-        }
+	@Override
+	public boolean hasNext() throws Exception {
+		if (page == null) {
+			return false;
+		}
 
-        if (tupleIndex == leafFrame.getTupleCount()) {
-            if (!fetchNextLeafPage()) {
-                return false;
-            }
-        }
+		if (tupleIndex == leafFrame.getTupleCount()) {
+			if (!fetchNextLeafPage()) {
+				return false;
+			}
+		}
 
-        do {
-            for (int i = tupleIndex; i < leafFrame.getTupleCount(); i++) {
-                if (leafFrame.intersect(searchKey, i, cmp)) {
-                    frameTuple.resetByTupleIndex(leafFrame, i);
-                    tupleIndexInc = i + 1;
-                    return true;
-                }
-            }
-        } while (fetchNextLeafPage());
-        return false;
-    }
+		do {
+			for (int i = tupleIndex; i < leafFrame.getTupleCount(); i++) {
+				if (leafFrame.intersect(searchKey, i, cmp)) {
+					frameTuple.resetByTupleIndex(leafFrame, i);
+					tupleIndexInc = i + 1;
+					return true;
+				}
+			}
+		} while (fetchNextLeafPage());
+		return false;
+	}
 
-    @Override
-    public void next() throws Exception {
-        tupleIndex = tupleIndexInc;
-    }
+	@Override
+	public void next() throws Exception {
+		tupleIndex = tupleIndexInc;
+	}
 
-    @Override
-    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws Exception {
-        // in case open is called multiple times without closing
-        if (this.page != null) {
-            this.page.releaseReadLatch();
-            readLatched = false;
-            bufferCache.unpin(this.page);
-            pathList.clear();
-        }
+	@Override
+	public void open(ICursorInitialState initialState,
+			ISearchPredicate searchPred) throws Exception {
+		// in case open is called multiple times without closing
+		if (this.page != null) {
+			this.page.releaseReadLatch();
+			readLatched = false;
+			bufferCache.unpin(this.page);
+			pathList.clear();
+		}
 
-        pathList = ((RTreeCursorInitialState) initialState).getPathList();
-        rootPage = ((RTreeCursorInitialState) initialState).getRootPage();
+		pathList = ((RTreeCursorInitialState) initialState).getPathList();
+		rootPage = ((RTreeCursorInitialState) initialState).getRootPage();
 
-        pred = (SearchPredicate) searchPred;
-        cmp = pred.getLowKeyComparator();
-        searchKey = pred.getSearchKey();
+		pred = (SearchPredicate) searchPred;
+		cmp = pred.getLowKeyComparator();
+		searchKey = pred.getSearchKey();
 
-        pathList.add(this.rootPage, -1, -1);
-        frameTuple.setFieldCount(cmp.getFieldCount());
-        tupleIndex = 0;
-        fetchNextLeafPage();
-    }
+		int maxFieldPos = cmp.getKeyFieldCount() / 2;
+		for (int i = 0; i < maxFieldPos; i++) {
+			int j = maxFieldPos + i;
+			int c = cmp.getComparators()[i].compare(searchKey.getFieldData(i),
+					searchKey.getFieldStart(i), searchKey.getFieldLength(i),
+					searchKey.getFieldData(j), searchKey.getFieldStart(j),
+					searchKey.getFieldLength(j));
+			if (c > 0) {
+				throw new IllegalArgumentException(
+						"The low key point has larger coordinates than the high key point.");
+			}
+		}
 
-    @Override
-    public void reset() {
-        try {
-            close();
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-    }
+		pathList.add(this.rootPage, -1, -1);
+		frameTuple.setFieldCount(cmp.getFieldCount());
+		tupleIndex = 0;
+		fetchNextLeafPage();
+	}
 
-    @Override
-    public void setBufferCache(IBufferCache bufferCache) {
-        this.bufferCache = bufferCache;
-    }
+	@Override
+	public void reset() {
+		try {
+			close();
+		} catch (Exception e) {
+			e.printStackTrace();
+		}
+	}
 
-    @Override
-    public void setFileId(int fileId) {
-        this.fileId = fileId;
-    }
-}
+	@Override
+	public void setBufferCache(IBufferCache bufferCache) {
+		this.bufferCache = bufferCache;
+	}
+
+	@Override
+	public void setFileId(int fileId) {
+		this.fileId = fileId;
+	}
+}
\ No newline at end of file
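
The new block in open() above is the added argument check: for each dimension it compares the low-point field of the search key against the matching high-point field and rejects a query rectangle whose low point has larger coordinates than its high point. Below is a minimal sketch of the same invariant over a flat double[] key; the [low_0..low_{d-1}, high_0..high_{d-1}] array layout is an assumption for illustration, whereas the real check walks tuple fields through the MultiComparator.

    public final class QueryRectangleCheck {
        // Validates a query rectangle given as [low_0..low_{d-1}, high_0..high_{d-1}].
        static void validate(double[] key) {
            int dim = key.length / 2;
            for (int i = 0; i < dim; i++) {
                if (key[i] > key[dim + i]) {
                    throw new IllegalArgumentException(
                            "The low key point has larger coordinates than the high key point.");
                }
            }
        }

        public static void main(String[] args) {
            validate(new double[] { 0.0, 0.0, 10.0, 10.0 });    // valid: low <= high on both axes
            try {
                validate(new double[] { 5.0, 0.0, 1.0, 10.0 }); // invalid: low x > high x
            } catch (IllegalArgumentException e) {
                System.out.println("rejected: " + e.getMessage());
            }
        }
    }
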
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSplitKey.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSplitKey.java
index f220ea3..ecbcd38 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSplitKey.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSplitKey.java
@@ -21,132 +21,134 @@
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
 
 public class RTreeSplitKey implements ISplitKey {
-    public byte[] leftPageData = null;
-    public ByteBuffer leftPageBuf = null;
-    public ITreeIndexTupleReference leftTuple;
+	public byte[] leftPageData = null;
+	public ByteBuffer leftPageBuf = null;
+	public ITreeIndexTupleReference leftTuple;
 
-    public byte[] rightPageData = null;
-    public ByteBuffer rightPageBuf = null;
-    public ITreeIndexTupleReference rightTuple;
+	public byte[] rightPageData = null;
+	public ByteBuffer rightPageBuf = null;
+	public ITreeIndexTupleReference rightTuple;
 
-    public int keySize = 0;
+	public int keySize = 0;
 
-    public RTreeSplitKey(ITreeIndexTupleReference leftTuple, ITreeIndexTupleReference rightTuple) {
-        this.leftTuple = leftTuple;
-        this.rightTuple = rightTuple;
-    }
+	public RTreeSplitKey(ITreeIndexTupleReference leftTuple,
+			ITreeIndexTupleReference rightTuple) {
+		this.leftTuple = leftTuple;
+		this.rightTuple = rightTuple;
+	}
 
-    public void initData(int keySize) {
-        // try to reuse existing memory from a lower-level split if possible
-        this.keySize = keySize;
-        if (leftPageData != null) {
-            if (leftPageData.length < keySize + 4) {
-                leftPageData = new byte[keySize + 4]; // add 4 for the page
-                leftPageBuf = ByteBuffer.wrap(leftPageData);
-            }
-        } else {
-            leftPageData = new byte[keySize + 4]; // add 4 for the page
-            leftPageBuf = ByteBuffer.wrap(leftPageData);
-        }
-        if (rightPageData != null) {
-            if (rightPageData.length < keySize + 4) {
-                rightPageData = new byte[keySize + 4]; // add 4 for the page
-                rightPageBuf = ByteBuffer.wrap(rightPageData);
-            }
-        } else {
-            rightPageData = new byte[keySize + 4]; // add 4 for the page
-            rightPageBuf = ByteBuffer.wrap(rightPageData);
-        }
+	public void initData(int keySize) {
+		// try to reuse existing memory from a lower-level split if possible
+		this.keySize = keySize;
+		if (leftPageData != null) {
+			if (leftPageData.length < keySize + 4) {
+				leftPageData = new byte[keySize + 4]; // add 4 for the page
+				leftPageBuf = ByteBuffer.wrap(leftPageData);
+			}
+		} else {
+			leftPageData = new byte[keySize + 4]; // add 4 for the page
+			leftPageBuf = ByteBuffer.wrap(leftPageData);
+		}
+		if (rightPageData != null) {
+			if (rightPageData.length < keySize + 4) {
+				rightPageData = new byte[keySize + 4]; // add 4 for the page
+				rightPageBuf = ByteBuffer.wrap(rightPageData);
+			}
+		} else {
+			rightPageData = new byte[keySize + 4]; // add 4 for the page
+			rightPageBuf = ByteBuffer.wrap(rightPageData);
+		}
 
-        leftTuple.resetByTupleOffset(leftPageBuf, 0);
-        rightTuple.resetByTupleOffset(rightPageBuf, 0);
-    }
+		leftTuple.resetByTupleOffset(leftPageBuf, 0);
+		rightTuple.resetByTupleOffset(rightPageBuf, 0);
+	}
 
-    public void resetLeftPage() {
-        leftPageData = null;
-        leftPageBuf = null;
-    }
+	public void resetLeftPage() {
+		leftPageData = null;
+		leftPageBuf = null;
+	}
 
-    public void resetRightPage() {
-        rightPageData = null;
-        rightPageBuf = null;
-    }
+	public void resetRightPage() {
+		rightPageData = null;
+		rightPageBuf = null;
+	}
 
-    public ByteBuffer getLeftPageBuffer() {
-        return leftPageBuf;
-    }
+	public ByteBuffer getLeftPageBuffer() {
+		return leftPageBuf;
+	}
 
-    public ByteBuffer getRightPageBuffer() {
-        return rightPageBuf;
-    }
+	public ByteBuffer getRightPageBuffer() {
+		return rightPageBuf;
+	}
 
-    public ITreeIndexTupleReference getLeftTuple() {
-        return leftTuple;
-    }
+	public ITreeIndexTupleReference getLeftTuple() {
+		return leftTuple;
+	}
 
-    public ITreeIndexTupleReference getRightTuple() {
-        return rightTuple;
-    }
+	public ITreeIndexTupleReference getRightTuple() {
+		return rightTuple;
+	}
 
-    public int getLeftPage() {
-        return leftPageBuf.getInt(keySize);
-    }
+	public int getLeftPage() {
+		return leftPageBuf.getInt(keySize);
+	}
 
-    public int getRightPage() {
-        return rightPageBuf.getInt(keySize);
-    }
+	public int getRightPage() {
+		return rightPageBuf.getInt(keySize);
+	}
 
-    public void setLeftPage(int page) {
-        leftPageBuf.putInt(keySize, page);
-    }
+	public void setLeftPage(int page) {
+		leftPageBuf.putInt(keySize, page);
+	}
 
-    public void setRightPage(int page) {
-        rightPageBuf.putInt(keySize, page);
-    }
+	public void setRightPage(int page) {
+		rightPageBuf.putInt(keySize, page);
+	}
 
-    public ISplitKey duplicate(ITreeIndexTupleReference copyLeftTuple, ITreeIndexTupleReference copyRightTuple) {
-        RTreeSplitKey copy = new RTreeSplitKey(copyLeftTuple, copyRightTuple);
-        copy.leftPageData = leftPageData.clone();
-        copy.leftPageBuf = ByteBuffer.wrap(copy.leftPageData);
-        copy.leftTuple.setFieldCount(leftTuple.getFieldCount());
-        copy.leftTuple.resetByTupleOffset(copy.leftPageBuf, 0);
+	public ISplitKey duplicate(ITreeIndexTupleReference copyLeftTuple,
+			ITreeIndexTupleReference copyRightTuple) {
+		RTreeSplitKey copy = new RTreeSplitKey(copyLeftTuple, copyRightTuple);
+		copy.leftPageData = leftPageData.clone();
+		copy.leftPageBuf = ByteBuffer.wrap(copy.leftPageData);
+		copy.leftTuple.setFieldCount(leftTuple.getFieldCount());
+		copy.leftTuple.resetByTupleOffset(copy.leftPageBuf, 0);
 
-        copy.rightPageData = rightPageData.clone();
-        copy.rightPageBuf = ByteBuffer.wrap(copy.rightPageData);
-        copy.rightTuple.setFieldCount(rightTuple.getFieldCount());
-        copy.rightTuple.resetByTupleOffset(copy.rightPageBuf, 0);
-        return copy;
-    }
+		copy.rightPageData = rightPageData.clone();
+		copy.rightPageBuf = ByteBuffer.wrap(copy.rightPageData);
+		copy.rightTuple.setFieldCount(rightTuple.getFieldCount());
+		copy.rightTuple.resetByTupleOffset(copy.rightPageBuf, 0);
+		return copy;
+	}
 
-    @Override
-    public void reset() {
-        leftPageData = null;
-        leftPageBuf = null;
-        rightPageData = null;
-        rightPageBuf = null;
-    }
+	@Override
+	public void reset() {
+		leftPageData = null;
+		leftPageBuf = null;
+		rightPageData = null;
+		rightPageBuf = null;
+	}
 
-    @Override
-    public ByteBuffer getBuffer() {
-        // TODO Auto-generated method stub
-        return null;
-    }
+	@Override
+	public ByteBuffer getBuffer() {
+		// TODO Auto-generated method stub
+		return null;
+	}
 
-    @Override
-    public ITreeIndexTupleReference getTuple() {
-        // TODO Auto-generated method stub
-        return null;
-    }
+	@Override
+	public ITreeIndexTupleReference getTuple() {
+		// TODO Auto-generated method stub
+		return null;
+	}
 
-    @Override
-    public void setPages(int leftPage, int rightPage) {
-        leftPageBuf.putInt(keySize, leftPage);
-        rightPageBuf.putInt(keySize, rightPage);
-    }
+	@Override
+	public void setPages(int leftPage, int rightPage) {
+		leftPageBuf.putInt(keySize, leftPage);
+		rightPageBuf.putInt(keySize, rightPage);
+	}
 
-    @Override
-    public ISplitKey duplicate(ITreeIndexTupleReference copyTuple) {
-        // TODO Auto-generated method stub
-        return null;
-    }
+	@Override
+	public ISplitKey duplicate(ITreeIndexTupleReference copyTuple) {
+		// TODO Auto-generated method stub
+		return null;
+	}
 }
\ No newline at end of file
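
A note on the buffer layout in RTreeSplitKey above: each side allocates keySize + 4 bytes, and the trailing 4 bytes hold the child page id ("add 4 for the page"), which is exactly what setLeftPage/getLeftPage and setRightPage/getRightPage read and write at offset keySize. A tiny self-contained sketch of that layout follows; the keySize value here is hypothetical.

    import java.nio.ByteBuffer;

    public class SplitKeyLayoutSketch {
        public static void main(String[] args) {
            int keySize = 16;                    // hypothetical size of the serialized MBR key
            byte[] data = new byte[keySize + 4]; // key bytes followed by a 4-byte page id
            ByteBuffer buf = ByteBuffer.wrap(data);

            buf.putInt(keySize, 42);                 // corresponds to setLeftPage(42)
            System.out.println(buf.getInt(keySize)); // corresponds to getLeftPage() -> 42
        }
    }
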
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/Rectangle.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/Rectangle.java
index ef78a9b..019760b 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/Rectangle.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/Rectangle.java
@@ -16,20 +16,17 @@
 package edu.uci.ics.hyracks.storage.am.rtree.impls;
 
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IGenericPrimitiveSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
 
 public class Rectangle {
 	private int dim;
 	private double[] low;
 	private double[] high;
-	private IGenericPrimitiveSerializerDeserializer[] recDescSers;
 
-	public Rectangle(int dim,
-			IGenericPrimitiveSerializerDeserializer[] recDescSers) {
+	public Rectangle(int dim) {
 		this.dim = dim;
 		low = new double[this.dim];
 		high = new double[this.dim];
-		this.recDescSers = recDescSers;
 	}
 
 	public int getDim() {
@@ -52,28 +49,26 @@
 		high[i] = value;
 	}
 
-	public void set(ITupleReference tuple) {
+	public void set(ITupleReference tuple, MultiComparator cmp) {
 		for (int i = 0; i < getDim(); i++) {
 			int j = i + getDim();
-			setLow(i,
-					recDescSers[i].getValue(tuple.getFieldData(i),
-							tuple.getFieldStart(i)));
-			setHigh(i,
-					recDescSers[j].getValue(tuple.getFieldData(j),
-							tuple.getFieldStart(j)));
+			setLow(i, cmp.getValueProviders()[i].getValue(
+					tuple.getFieldData(i), tuple.getFieldStart(i)));
+			setHigh(i, cmp.getValueProviders()[j].getValue(
+					tuple.getFieldData(j), tuple.getFieldStart(j)));
 		}
 	}
 
-	public void enlarge(ITupleReference tupleToBeInserted) {
+	public void enlarge(ITupleReference tupleToBeInserted, MultiComparator cmp) {
 		for (int i = 0; i < getDim(); i++) {
 			int j = getDim() + i;
-			double low = recDescSers[i].getValue(
+			double low = cmp.getValueProviders()[i].getValue(
 					tupleToBeInserted.getFieldData(i),
 					tupleToBeInserted.getFieldStart(i));
 			if (getLow(i) > low) {
 				setLow(i, low);
 			}
-			double high = recDescSers[j].getValue(
+			double high = cmp.getValueProviders()[j].getValue(
 					tupleToBeInserted.getFieldData(j),
 					tupleToBeInserted.getFieldStart(j));
 			if (getHigh(i) < high) {
@@ -91,26 +86,6 @@
 		return margin;
 	}
 
-	public double overlappedArea(ITupleReference tuple) {
-		double area = 1.0;
-		double f1, f2;
-
-		for (int i = 0; i < getDim(); i++) {
-			int j = getDim() + i;
-			double low = recDescSers[i].getValue(tuple.getFieldData(i),
-					tuple.getFieldStart(i));
-			double high = recDescSers[j].getValue(tuple.getFieldData(j),
-					tuple.getFieldStart(j));
-			if (getLow(i) > high || getHigh(i) < low) {
-				return 0.0;
-			}
-			f1 = Math.max(getLow(i), low);
-			f2 = Math.min(getHigh(i), high);
-			area *= f2 - f1;
-		}
-		return area;
-	}
-
 	public double overlappedArea(Rectangle rec) {
 		double area = 1.0;
 		double f1, f2;
@@ -119,6 +94,7 @@
 			if (getLow(i) > rec.getHigh(i) || getHigh(i) < rec.getLow(i)) {
 				return 0.0;
 			}
+
 			f1 = Math.max(getLow(i), rec.getLow(i));
 			f2 = Math.min(getHigh(i), rec.getHigh(i));
 			area *= f2 - f1;
@@ -126,18 +102,6 @@
 		return area;
 	}
 
-	public double area(ITupleReference tuple) {
-		double area = 1.0;
-		for (int i = 0; i < getDim(); i++) {
-			int j = getDim() + i;
-			area *= recDescSers[j].getValue(tuple.getFieldData(j),
-					tuple.getFieldStart(j))
-					- recDescSers[i].getValue(tuple.getFieldData(i),
-							tuple.getFieldStart(i));
-		}
-		return area;
-	}
-
 	public double area() {
 		double area = 1.0;
 		for (int i = 0; i < getDim(); i++) {
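
The Rectangle change above is the value-provider refactoring: instead of holding an IGenericPrimitiveSerializerDeserializer array, set() and enlarge() now read coordinates through cmp.getValueProviders()[i].getValue(fieldData, fieldStart), so the geometry code no longer depends on the record's serializers. The sketch below shows the general shape of such a provider; the interface and class names are illustrative stand-ins, not the actual Hyracks types.

    import java.nio.ByteBuffer;

    // Illustrative stand-in for the value-provider idea: extract a primitive
    // value from a field's raw bytes without knowing the record's serializers.
    interface ValueProviderSketch {
        double getValue(byte[] bytes, int offset);
    }

    public class DoubleValueProviderSketch implements ValueProviderSketch {
        @Override
        public double getValue(byte[] bytes, int offset) {
            return ByteBuffer.wrap(bytes).getDouble(offset);
        }

        public static void main(String[] args) {
            byte[] field = ByteBuffer.allocate(8).putDouble(3.5).array();
            ValueProviderSketch provider = new DoubleValueProviderSketch();
            System.out.println(provider.getValue(field, 0)); // prints 3.5
        }
    }
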
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/SearchPredicate.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/SearchPredicate.java
index cd3a0ef..c8d5816 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/SearchPredicate.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/SearchPredicate.java
@@ -21,29 +21,29 @@
 
 public class SearchPredicate implements ISearchPredicate {
 
-    private static final long serialVersionUID = 1L;
+	private static final long serialVersionUID = 1L;
 
-    protected ITupleReference searchKey;
-    protected MultiComparator cmp;
+	protected ITupleReference searchKey;
+	protected MultiComparator cmp;
 
-    public SearchPredicate(ITupleReference searchKey, MultiComparator cmp) {
-        this.searchKey = searchKey;
-        this.cmp = cmp;
-    }
+	public SearchPredicate(ITupleReference searchKey, MultiComparator cmp) {
+		this.searchKey = searchKey;
+		this.cmp = cmp;
+	}
 
-    public ITupleReference getSearchKey() {
-        return searchKey;
-    }
+	public ITupleReference getSearchKey() {
+		return searchKey;
+	}
 
-    public void setSearchKey(ITupleReference searchKey) {
-        this.searchKey = searchKey;
-    }
+	public void setSearchKey(ITupleReference searchKey) {
+		this.searchKey = searchKey;
+	}
 
-    public MultiComparator getLowKeyComparator() {
-        return cmp;
-    }
+	public MultiComparator getLowKeyComparator() {
+		return cmp;
+	}
 
-    public MultiComparator getHighKeyComparator() {
-        return cmp;
-    }
+	public MultiComparator getHighKeyComparator() {
+		return cmp;
+	}
 }
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TupleEntry.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TupleEntry.java
index 0cb3de8..6cb84a4 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TupleEntry.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TupleEntry.java
@@ -18,35 +18,35 @@
 import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMFrame;
 
 public class TupleEntry implements Comparable<TupleEntry> {
-    private int tupleIndex;
-    private double value;
+	private int tupleIndex;
+	private double value;
 
-    public TupleEntry() {
-    }
+	public TupleEntry() {
+	}
 
-    public int getTupleIndex() {
-        return tupleIndex;
-    }
+	public int getTupleIndex() {
+		return tupleIndex;
+	}
 
-    public void setTupleIndex(int tupleIndex) {
-        this.tupleIndex = tupleIndex;
-    }
+	public void setTupleIndex(int tupleIndex) {
+		this.tupleIndex = tupleIndex;
+	}
 
-    public double getValue() {
-        return value;
-    }
+	public double getValue() {
+		return value;
+	}
 
-    public void setValue(double value) {
-        this.value = value;
-    }
+	public void setValue(double value) {
+		this.value = value;
+	}
 
-    public int compareTo(TupleEntry tupleEntry) {
-        double cmp = this.getValue() - tupleEntry.getValue();
-        if (cmp > RTreeNSMFrame.doubleEpsilon())
-            return 1;
-        cmp = tupleEntry.getValue() - this.getValue();
-        if (cmp > RTreeNSMFrame.doubleEpsilon())
-            return -1;
-        return 0;
-    }
+	public int compareTo(TupleEntry tupleEntry) {
+		double cmp = this.getValue() - tupleEntry.getValue();
+		if (cmp > RTreeNSMFrame.doubleEpsilon())
+			return 1;
+		cmp = tupleEntry.getValue() - this.getValue();
+		if (cmp > RTreeNSMFrame.doubleEpsilon())
+			return -1;
+		return 0;
+	}
 }
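
TupleEntry.compareTo above orders entries by value but treats two values within RTreeNSMFrame.doubleEpsilon() of each other as equal, so floating-point noise does not flip the ordering. A self-contained sketch of that comparison follows; the epsilon constant here is an assumed placeholder for the real one.

    public class EpsilonCompareSketch {
        // Hypothetical tolerance; the real value comes from RTreeNSMFrame.doubleEpsilon().
        static final double EPSILON = 1e-9;

        static int compare(double a, double b) {
            if (a - b > EPSILON) return 1;
            if (b - a > EPSILON) return -1;
            return 0; // values within EPSILON of each other are treated as equal
        }

        public static void main(String[] args) {
            System.out.println(compare(1.0000000001, 1.0)); // 0: within tolerance
            System.out.println(compare(2.0, 1.0));          // 1
            System.out.println(compare(1.0, 2.0));          // -1
        }
    }

As with any epsilon comparison, equality is not transitive for values that straddle the tolerance, which is acceptable for sorting near-equal doubles but would not be safe for deduplication.
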
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TupleEntryArrayList.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TupleEntryArrayList.java
index 8be8251..7bd2334 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TupleEntryArrayList.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TupleEntryArrayList.java
@@ -19,61 +19,61 @@
 import java.util.Collections;
 
 public class TupleEntryArrayList {
-    private TupleEntry[] data;
-    private int size;
-    private final int growth;
+	private TupleEntry[] data;
+	private int size;
+	private final int growth;
 
-    public TupleEntryArrayList(int initialCapacity, int growth) {
-        data = new TupleEntry[initialCapacity];
-        size = 0;
-        this.growth = growth;
-    }
+	public TupleEntryArrayList(int initialCapacity, int growth) {
+		data = new TupleEntry[initialCapacity];
+		size = 0;
+		this.growth = growth;
+	}
 
-    public int size() {
-        return size;
-    }
+	public int size() {
+		return size;
+	}
 
-    public void add(int tupleIndex, double value) {
-        if (size == data.length) {
-            TupleEntry[] newData = new TupleEntry[data.length + growth];
-            System.arraycopy(data, 0, newData, 0, data.length);
-            data = newData;
-        }
-        if (data[size] == null) {
-            data[size] = new TupleEntry();
-        }
-        data[size].setTupleIndex(tupleIndex);
-        data[size].setValue(value);
-        size++;
-    }
+	public void add(int tupleIndex, double value) {
+		if (size == data.length) {
+			TupleEntry[] newData = new TupleEntry[data.length + growth];
+			System.arraycopy(data, 0, newData, 0, data.length);
+			data = newData;
+		}
+		if (data[size] == null) {
+			data[size] = new TupleEntry();
+		}
+		data[size].setTupleIndex(tupleIndex);
+		data[size].setValue(value);
+		size++;
+	}
 
-    public void removeLast() {
-        if (size > 0)
-            size--;
-    }
+	public void removeLast() {
+		if (size > 0)
+			size--;
+	}
 
-    // WARNING: caller is responsible for checking size > 0
-    public TupleEntry getLast() {
-        return data[size - 1];
-    }
+	// WARNING: caller is responsible for checking size > 0
+	public TupleEntry getLast() {
+		return data[size - 1];
+	}
 
-    public TupleEntry get(int i) {
-        return data[i];
-    }
+	public TupleEntry get(int i) {
+		return data[i];
+	}
 
-    public void clear() {
-        size = 0;
-    }
+	public void clear() {
+		size = 0;
+	}
 
-    public boolean isEmpty() {
-        return size == 0;
-    }
+	public boolean isEmpty() {
+		return size == 0;
+	}
 
-    public void sort(EntriesOrder order, int tupleCount) {
-        if (order == EntriesOrder.ASCENDING) {
-            Arrays.sort(data, 0, tupleCount);
-        } else {
-            Arrays.sort(data, 0, tupleCount, Collections.reverseOrder());
-        }
-    }
+	public void sort(EntriesOrder order, int tupleCount) {
+		if (order == EntriesOrder.ASCENDING) {
+			Arrays.sort(data, 0, tupleCount);
+		} else {
+			Arrays.sort(data, 0, tupleCount, Collections.reverseOrder());
+		}
+	}
 }
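
TupleEntryArrayList.sort above sorts only the first tupleCount slots in place and relies on TupleEntry implementing Comparable, switching to Collections.reverseOrder() for the descending case. A small standalone example of the same Arrays.sort range-plus-comparator usage:

    import java.util.Arrays;
    import java.util.Collections;

    public class ReverseSortSketch {
        public static void main(String[] args) {
            Integer[] values = { 5, 1, 4, 2, 3, 99, 98 };
            // Sort only the first five slots in descending order; the tail stays untouched,
            // mirroring sort(EntriesOrder.DESCENDING, tupleCount) above.
            Arrays.sort(values, 0, 5, Collections.reverseOrder());
            System.out.println(Arrays.toString(values)); // [5, 4, 3, 2, 1, 99, 98]
        }
    }
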
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/UnorderedSlotManager.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/UnorderedSlotManager.java
index d2b1c53..7badb8e 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/UnorderedSlotManager.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/UnorderedSlotManager.java
@@ -24,91 +24,104 @@
 import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMFrame;
 
 public class UnorderedSlotManager extends AbstractSlotManager {
-    @Override
-    public int findTupleIndex(ITupleReference searchKey, ITreeIndexTupleReference frameTuple, MultiComparator multiCmp,
-            FindTupleMode mode, FindTupleNoExactMatchPolicy matchPolicy) {
+	@Override
+	public int findTupleIndex(ITupleReference searchKey,
+			ITreeIndexTupleReference frameTuple, MultiComparator multiCmp,
+			FindTupleMode mode, FindTupleNoExactMatchPolicy matchPolicy) {
 
-        int maxFieldPos = multiCmp.getKeyFieldCount() / 2;
-        for (int i = 0; i < frame.getTupleCount(); i++) {
-            frameTuple.resetByTupleIndex(frame, i);
+		int maxFieldPos = multiCmp.getKeyFieldCount() / 2;
+		for (int i = 0; i < frame.getTupleCount(); i++) {
+			frameTuple.resetByTupleIndex(frame, i);
 
-            boolean foundTuple = true;
-            for (int j = 0; j < maxFieldPos; j++) {
-                int k = maxFieldPos + j;
-                int c1 = multiCmp.getComparators()[j].compare(frameTuple.getFieldData(j), frameTuple.getFieldStart(j),
-                        frameTuple.getFieldLength(j), searchKey.getFieldData(j), searchKey.getFieldStart(j),
-                        searchKey.getFieldLength(j));
+			boolean foundTuple = true;
+			for (int j = 0; j < maxFieldPos; j++) {
+				int k = maxFieldPos + j;
+				int c1 = multiCmp.getComparators()[j].compare(
+						frameTuple.getFieldData(j),
+						frameTuple.getFieldStart(j),
+						frameTuple.getFieldLength(j),
+						searchKey.getFieldData(j), searchKey.getFieldStart(j),
+						searchKey.getFieldLength(j));
 
-                if (c1 != 0) {
-                    foundTuple = false;
-                    break;
-                }
-                int c2 = multiCmp.getComparators()[k].compare(frameTuple.getFieldData(k), frameTuple.getFieldStart(k),
-                        frameTuple.getFieldLength(k), searchKey.getFieldData(k), searchKey.getFieldStart(k),
-                        searchKey.getFieldLength(k));
-                if (c2 != 0) {
-                    foundTuple = false;
-                    break;
-                }
-            }
-            int remainingFieldCount = multiCmp.getFieldCount() - multiCmp.getKeyFieldCount();
-            for (int j = multiCmp.getKeyFieldCount(); j < multiCmp.getKeyFieldCount() + remainingFieldCount; j++) {
-                if (!compareField(searchKey, frameTuple, j)) {
-                    foundTuple = false;
-                    break;
-                }
-            }
-            if (foundTuple) {
-                return i;
-            }
-        }
-        return -1;
-    }
+				if (c1 != 0) {
+					foundTuple = false;
+					break;
+				}
+				int c2 = multiCmp.getComparators()[k].compare(
+						frameTuple.getFieldData(k),
+						frameTuple.getFieldStart(k),
+						frameTuple.getFieldLength(k),
+						searchKey.getFieldData(k), searchKey.getFieldStart(k),
+						searchKey.getFieldLength(k));
+				if (c2 != 0) {
+					foundTuple = false;
+					break;
+				}
+			}
+			int remainingFieldCount = multiCmp.getFieldCount()
+					- multiCmp.getKeyFieldCount();
+			for (int j = multiCmp.getKeyFieldCount(); j < multiCmp
+					.getKeyFieldCount() + remainingFieldCount; j++) {
+				if (!compareField(searchKey, frameTuple, j)) {
+					foundTuple = false;
+					break;
+				}
+			}
+			if (foundTuple) {
+				return i;
+			}
+		}
+		return -1;
+	}
 
-    public boolean compareField(ITupleReference searchKey, ITreeIndexTupleReference frameTuple, int fIdx) {
-        int searchKeyFieldLength = searchKey.getFieldLength(fIdx);
-        int frameTupleFieldLength = frameTuple.getFieldLength(fIdx);
+	public boolean compareField(ITupleReference searchKey,
+			ITreeIndexTupleReference frameTuple, int fIdx) {
+		int searchKeyFieldLength = searchKey.getFieldLength(fIdx);
+		int frameTupleFieldLength = frameTuple.getFieldLength(fIdx);
 
-        if (searchKeyFieldLength != frameTupleFieldLength) {
-            return false;
-        }
+		if (searchKeyFieldLength != frameTupleFieldLength) {
+			return false;
+		}
 
-        for (int i = 0; i < searchKeyFieldLength; i++) {
-            if (searchKey.getFieldData(fIdx)[i + searchKey.getFieldStart(fIdx)] != frameTuple.getFieldData(fIdx)[i
-                    + frameTuple.getFieldStart(fIdx)]) {
-                return false;
-            }
-        }
-        return true;
-    }
+		for (int i = 0; i < searchKeyFieldLength; i++) {
+			if (searchKey.getFieldData(fIdx)[i + searchKey.getFieldStart(fIdx)] != frameTuple
+					.getFieldData(fIdx)[i + frameTuple.getFieldStart(fIdx)]) {
+				return false;
+			}
+		}
+		return true;
+	}
 
-    @Override
-    public int insertSlot(int tupleIndex, int tupleOff) {
-        int slotOff = getSlotEndOff() - slotSize;
-        setSlot(slotOff, tupleOff);
-        return slotOff;
-    }
+	@Override
+	public int insertSlot(int tupleIndex, int tupleOff) {
+		int slotOff = getSlotEndOff() - slotSize;
+		setSlot(slotOff, tupleOff);
+		return slotOff;
+	}
 
-    public void modifySlot(int slotOff, int tupleOff) {
-        setSlot(slotOff, tupleOff);
-    }
+	public void modifySlot(int slotOff, int tupleOff) {
+		setSlot(slotOff, tupleOff);
+	}
 
-    public void deleteEmptySlots() {
-        int slotOff = getSlotStartOff();
-        while (slotOff >= getSlotEndOff()) {
-            if (frame.getBuffer().getInt(slotOff) == -1) {
-                while (frame.getBuffer().getInt(getSlotEndOff()) == -1) {
-                    ((RTreeNSMFrame) frame).setTupleCount(frame.getTupleCount() - 1);
-                }
-                if (slotOff > getSlotEndOff()) {
-                    System.arraycopy(frame.getBuffer().array(), getSlotEndOff(), frame.getBuffer().array(), slotOff,
-                            slotSize);
-                    ((RTreeNSMFrame) frame).setTupleCount(frame.getTupleCount() - 1);
-                } else {
-                    break;
-                }
-            }
-            slotOff -= slotSize;
-        }
-    }
+	public void deleteEmptySlots() {
+		int slotOff = getSlotStartOff();
+		while (slotOff >= getSlotEndOff()) {
+			if (frame.getBuffer().getInt(slotOff) == -1) {
+				while (frame.getBuffer().getInt(getSlotEndOff()) == -1) {
+					((RTreeNSMFrame) frame)
+							.setTupleCount(frame.getTupleCount() - 1);
+				}
+				if (slotOff > getSlotEndOff()) {
+					System.arraycopy(frame.getBuffer().array(),
+							getSlotEndOff(), frame.getBuffer().array(),
+							slotOff, slotSize);
+					((RTreeNSMFrame) frame)
+							.setTupleCount(frame.getTupleCount() - 1);
+				} else {
+					break;
+				}
+			}
+			slotOff -= slotSize;
+		}
+	}
 }
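For reference, deleteEmptySlots() above compacts the slot directory by filling each empty slot (marked -1) with the last live slot and shrinking the tuple count, which is legal only because this slot manager is unordered. The sketch below shows the same compaction idea on a plain int array; the names are illustrative and the -1 sentinel mirrors the check in the frame code.

    // Standalone sketch of the compaction performed by deleteEmptySlots(): an empty slot
    // can simply be overwritten with the last live slot and the live count reduced.
    final class UnorderedCompaction {
        static int compact(int[] slots, int count) {
            int i = 0;
            while (i < count) {
                if (slots[i] == -1) {
                    while (count > i && slots[count - 1] == -1) {
                        count--;                       // drop trailing empties first
                    }
                    if (count - 1 > i) {
                        slots[i] = slots[count - 1];   // move the last live slot into the hole
                        count--;
                    } else {
                        break;                         // the hole is at (or past) the new end
                    }
                }
                i++;
            }
            return count;                              // new number of live slots
        }
    }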
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriter.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriter.java
index 96820c9..6acac7e 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriter.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriter.java
@@ -23,35 +23,36 @@
 
 public class RTreeTypeAwareTupleWriter extends TypeAwareTupleWriter {
 
-    public RTreeTypeAwareTupleWriter(ITypeTrait[] typeTraits) {
-        super(typeTraits);
-    }
+	public RTreeTypeAwareTupleWriter(ITypeTrait[] typeTraits) {
+		super(typeTraits);
+	}
 
-    public int writeTupleFields(ITreeIndexTupleReference[] refs, int startField, ByteBuffer targetBuf, int targetOff) {
-        int runner = targetOff;
-        int nullFlagsBytes = getNullFlagsBytes(refs.length);
-        // write null indicator bits
-        for (int i = 0; i < nullFlagsBytes; i++) {
-            targetBuf.put(runner++, (byte) 0);
-        }
+	public int writeTupleFields(ITreeIndexTupleReference[] refs,
+			int startField, ByteBuffer targetBuf, int targetOff) {
+		int runner = targetOff;
+		int nullFlagsBytes = getNullFlagsBytes(refs.length);
+		// write null indicator bits
+		for (int i = 0; i < nullFlagsBytes; i++) {
+			targetBuf.put(runner++, (byte) 0);
+		}
 
-        // write field slots for variable length fields
-        // since the r-tree has fixed length keys, we don't actually need this?
-        encDec.reset(targetBuf.array(), runner);
-        for (int i = startField; i < startField + refs.length; i++) {
-            if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
-                encDec.encode(refs[i].getFieldLength(i));
-            }
-        }
-        runner = encDec.getPos();
+		// write field slots for variable length fields
+		// since the r-tree has fixed length keys, we don't actually need this?
+		encDec.reset(targetBuf.array(), runner);
+		for (int i = startField; i < startField + refs.length; i++) {
+			if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
+				encDec.encode(refs[i].getFieldLength(i));
+			}
+		}
+		runner = encDec.getPos();
 
-        // write data
-        for (int i = 0; i < refs.length; i++) {
-            System.arraycopy(refs[i].getFieldData(i), refs[i].getFieldStart(i), targetBuf.array(), runner,
-                    refs[i].getFieldLength(i));
-            runner += refs[i].getFieldLength(i);
-        }
-        return runner - targetOff;
+		// write data
+		for (int i = 0; i < refs.length; i++) {
+			System.arraycopy(refs[i].getFieldData(i), refs[i].getFieldStart(i),
+					targetBuf.array(), runner, refs[i].getFieldLength(i));
+			runner += refs[i].getFieldLength(i);
+		}
+		return runner - targetOff;
 
-    }
+	}
 }
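writeTupleFields() above assembles one physical tuple by writing a run of null-flag bytes, then any variable-length slots, then the raw field bytes back to back, and returns the number of bytes written. The standalone sketch below shows just that layout for fixed-length fields; the null-flag width of one bit per field rounded up to whole bytes is an assumption about getNullFlagsBytes(), and the variable-length slots are omitted because the r-tree keys are fixed length, as the method's own comment notes.

    import java.nio.ByteBuffer;

    // Standalone sketch of the layout produced by writeTupleFields(): null-flag bytes,
    // then the field bytes copied back to back. Illustrative names only.
    final class FixedLengthTupleLayout {
        static int write(byte[][] fields, ByteBuffer target, int targetOff) {
            int runner = targetOff;
            int nullFlagsBytes = (fields.length + 7) / 8;  // assumed: one bit per field, rounded up
            for (int i = 0; i < nullFlagsBytes; i++) {
                target.put(runner++, (byte) 0);            // no null fields in this sketch
            }
            for (byte[] field : fields) {
                System.arraycopy(field, 0, target.array(), runner, field.length);
                runner += field.length;
            }
            return runner - targetOff;                     // total bytes written
        }
    }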
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriterFactory.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriterFactory.java
index e2e99c7..7d88f2e 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriterFactory.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriterFactory.java
@@ -19,18 +19,19 @@
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
 
-public class RTreeTypeAwareTupleWriterFactory implements ITreeIndexTupleWriterFactory {
+public class RTreeTypeAwareTupleWriterFactory implements
+		ITreeIndexTupleWriterFactory {
 
-    private static final long serialVersionUID = 1L;
-    private ITypeTrait[] typeTraits;
+	private static final long serialVersionUID = 1L;
+	private ITypeTrait[] typeTraits;
 
-    public RTreeTypeAwareTupleWriterFactory(ITypeTrait[] typeTraits) {
-        this.typeTraits = typeTraits;
-    }
+	public RTreeTypeAwareTupleWriterFactory(ITypeTrait[] typeTraits) {
+		this.typeTraits = typeTraits;
+	}
 
-    @Override
-    public ITreeIndexTupleWriter createTupleWriter() {
-        return new RTreeTypeAwareTupleWriter(typeTraits);
-    }
+	@Override
+	public ITreeIndexTupleWriter createTupleWriter() {
+		return new RTreeTypeAwareTupleWriter(typeTraits);
+	}
 
 }
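The factory only captures the type traits and hands them to every writer it creates. A usage fragment mirroring the test setup elsewhere in this patch (imports omitted; the 4- and 8-byte fixed-length traits match the tests):

    // Usage sketch: build the traits, create one factory, obtain a writer where a frame needs one.
    ITypeTrait[] typeTraits = new ITypeTrait[] { new TypeTrait(8), new TypeTrait(8), new TypeTrait(4) };
    RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(typeTraits);
    ITreeIndexTupleWriter tupleWriter = tupleWriterFactory.createTupleWriter();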
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTest.java b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTest.java
index b2ab775..25c5370 100644
--- a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTest.java
+++ b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTest.java
@@ -23,18 +23,20 @@
 
 public abstract class AbstractRTreeTest {
 
-    protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
-    protected final static String tmpDir = System.getProperty("java.io.tmpdir");
-    protected final static String sep = System.getProperty("file.separator");
-    protected final static String fileName = tmpDir + sep + simpleDateFormat.format(new Date());
+	protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat(
+			"ddMMyy-hhmmssSS");
+	protected final static String tmpDir = System.getProperty("java.io.tmpdir");
+	protected final static String sep = System.getProperty("file.separator");
+	protected final static String fileName = tmpDir + sep
+			+ simpleDateFormat.format(new Date());
 
-    protected void print(String str) {
-        System.err.print(str);
-    }
+	protected void print(String str) {
+		System.err.print(str);
+	}
 
-    @AfterClass
-    public static void cleanup() throws Exception {
-        File f = new File(fileName);
-        f.deleteOnExit();
-    }
+	@AfterClass
+	public static void cleanup() throws Exception {
+		File f = new File(fileName);
+		f.deleteOnExit();
+	}
 }
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTest.java b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTest.java
index 630c77a..907d5dc 100644
--- a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTest.java
+++ b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTest.java
@@ -40,6 +40,7 @@
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
 import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
@@ -54,6 +55,8 @@
 import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeFrame;
 import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.DoublePrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.IntegerPrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
 import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeOpContext;
 import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriterFactory;
@@ -107,25 +110,24 @@
 		typeTraits[5] = new TypeTrait(4);
 		typeTraits[6] = new TypeTrait(8);
 
-		MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+		// declare value providers
+		IPrimitiveValueProvider[] valueProviders = new IPrimitiveValueProvider[keyFieldCount];
+		valueProviders[0] = DoublePrimitiveValueProviderFactory.INSTANCE
+				.createPrimitiveValueProvider();
+		valueProviders[1] = valueProviders[0];
+		valueProviders[2] = valueProviders[0];
+		valueProviders[3] = valueProviders[0];
+
+		MultiComparator cmp = new MultiComparator(typeTraits, cmps,
+				valueProviders);
 
 		RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(
 				typeTraits);
 
-		@SuppressWarnings("rawtypes")
-		ISerializerDeserializer[] recDescSers = {
-				DoubleSerializerDeserializer.INSTANCE,
-				DoubleSerializerDeserializer.INSTANCE,
-				DoubleSerializerDeserializer.INSTANCE,
-				DoubleSerializerDeserializer.INSTANCE,
-				DoubleSerializerDeserializer.INSTANCE,
-				IntegerSerializerDeserializer.INSTANCE,
-				DoubleSerializerDeserializer.INSTANCE };
-
 		ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(
-				tupleWriterFactory, recDescSers, keyFieldCount);
+				tupleWriterFactory, keyFieldCount);
 		ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(
-				tupleWriterFactory, recDescSers, keyFieldCount);
+				tupleWriterFactory, keyFieldCount);
 		ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
 		ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
 
@@ -145,6 +147,15 @@
 		ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
 		DataOutput dos = tb.getDataOutput();
 
+		@SuppressWarnings("rawtypes")
+		ISerializerDeserializer[] recDescSers = {
+				DoubleSerializerDeserializer.INSTANCE,
+				DoubleSerializerDeserializer.INSTANCE,
+				DoubleSerializerDeserializer.INSTANCE,
+				DoubleSerializerDeserializer.INSTANCE,
+				DoubleSerializerDeserializer.INSTANCE,
+				IntegerSerializerDeserializer.INSTANCE,
+				DoubleSerializerDeserializer.INSTANCE };
 		RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
 		IFrameTupleAccessor accessor = new FrameTupleAccessor(
 				ctx.getFrameSize(), recDesc);
@@ -251,190 +262,10 @@
 	}
 
 	// create an R-tree of two dimensions
-	// fill the R-tree with random integer key values using insertions
-	// perform ordered scan
-	@Test
-	public void test02() throws Exception {
-
-		TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES,
-				MAX_OPEN_FILES);
-		IBufferCache bufferCache = TestStorageManagerComponentHolder
-				.getBufferCache(ctx);
-		IFileMapProvider fmp = TestStorageManagerComponentHolder
-				.getFileMapProvider(ctx);
-		FileReference file = new FileReference(new File(fileName));
-		bufferCache.createFile(file);
-		int fileId = fmp.lookupFileId(file);
-		bufferCache.openFile(fileId);
-
-		// declare keys
-		int keyFieldCount = 4;
-		IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
-		cmps[0] = IntegerBinaryComparatorFactory.INSTANCE
-				.createBinaryComparator();
-		cmps[1] = cmps[0];
-		cmps[2] = cmps[0];
-		cmps[3] = cmps[0];
-
-		// declare tuple fields
-		int fieldCount = 7;
-		ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
-		typeTraits[0] = new TypeTrait(4);
-		typeTraits[1] = new TypeTrait(4);
-		typeTraits[2] = new TypeTrait(4);
-		typeTraits[3] = new TypeTrait(4);
-		typeTraits[4] = new TypeTrait(8);
-		typeTraits[5] = new TypeTrait(4);
-		typeTraits[6] = new TypeTrait(8);
-
-		MultiComparator cmp = new MultiComparator(typeTraits, cmps);
-
-		RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(
-				typeTraits);
-
-		@SuppressWarnings("rawtypes")
-		ISerializerDeserializer[] recDescSers = {
-				IntegerSerializerDeserializer.INSTANCE,
-				IntegerSerializerDeserializer.INSTANCE,
-				IntegerSerializerDeserializer.INSTANCE,
-				IntegerSerializerDeserializer.INSTANCE,
-				DoubleSerializerDeserializer.INSTANCE,
-				IntegerSerializerDeserializer.INSTANCE,
-				DoubleSerializerDeserializer.INSTANCE };
-
-		ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(
-				tupleWriterFactory, recDescSers, keyFieldCount);
-		ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(
-				tupleWriterFactory, recDescSers, keyFieldCount);
-		ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
-		ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
-
-		IRTreeFrame interiorFrame = (IRTreeFrame) interiorFrameFactory
-				.createFrame();
-		IRTreeFrame leafFrame = (IRTreeFrame) leafFrameFactory.createFrame();
-		IFreePageManager freePageManager = new LinkedListFreePageManager(
-				bufferCache, fileId, 0, metaFrameFactory);
-
-		RTree rtree = new RTree(bufferCache, freePageManager,
-				interiorFrameFactory, leafFrameFactory, cmp);
-		rtree.create(fileId, leafFrame, metaFrame);
-		rtree.open(fileId);
-
-		ByteBuffer hyracksFrame = ctx.allocateFrame();
-		FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-		ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
-		DataOutput dos = tb.getDataOutput();
-
-		RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
-		IFrameTupleAccessor accessor = new FrameTupleAccessor(
-				ctx.getFrameSize(), recDesc);
-		accessor.reset(hyracksFrame);
-		FrameTupleReference tuple = new FrameTupleReference();
-
-		RTreeOpContext insertOpCtx = rtree.createOpContext(IndexOp.INSERT,
-				leafFrame, interiorFrame, metaFrame);
-
-		Random rnd = new Random();
-		rnd.setSeed(50);
-
-		Random rnd2 = new Random();
-		rnd2.setSeed(50);
-		for (int i = 0; i < 5000; i++) {
-
-			int p1x = rnd.nextInt();
-			int p1y = rnd.nextInt();
-			int p2x = rnd.nextInt();
-			int p2y = rnd.nextInt();
-
-			double pk1 = rnd2.nextDouble();
-			int pk2 = rnd2.nextInt();
-			double pk3 = rnd2.nextDouble();
-
-			tb.reset();
-			IntegerSerializerDeserializer.INSTANCE.serialize(
-					Math.min(p1x, p2x), dos);
-			tb.addFieldEndOffset();
-			IntegerSerializerDeserializer.INSTANCE.serialize(
-					Math.min(p1y, p2y), dos);
-			tb.addFieldEndOffset();
-			IntegerSerializerDeserializer.INSTANCE.serialize(
-					Math.max(p1x, p2x), dos);
-			tb.addFieldEndOffset();
-			IntegerSerializerDeserializer.INSTANCE.serialize(
-					Math.max(p1y, p2y), dos);
-			tb.addFieldEndOffset();
-			DoubleSerializerDeserializer.INSTANCE.serialize(pk1, dos);
-			tb.addFieldEndOffset();
-			IntegerSerializerDeserializer.INSTANCE.serialize(pk2, dos);
-			tb.addFieldEndOffset();
-			DoubleSerializerDeserializer.INSTANCE.serialize(pk3, dos);
-			tb.addFieldEndOffset();
-
-			appender.reset(hyracksFrame, true);
-			appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0,
-					tb.getSize());
-
-			tuple.reset(accessor, 0);
-
-			if (i % 1000 == 0) {
-				print("INSERTING " + i + " " + Math.min(p1x, p2x) + " "
-						+ Math.min(p1y, p2y) + " " + Math.max(p1x, p2x) + " "
-						+ Math.max(p1y, p2y) + "\n");
-			}
-
-			try {
-				rtree.insert(tuple, insertOpCtx);
-			} catch (TreeIndexException e) {
-			} catch (Exception e) {
-				e.printStackTrace();
-			}
-		}
-
-		// rtree.printTree(leafFrame, interiorFrame, recDescSers);
-		// System.err.println();
-
-		String rtreeStats = rtree.printStats();
-		print(rtreeStats);
-
-		// disk-order scan
-		print("DISK-ORDER SCAN:\n");
-		TreeDiskOrderScanCursor diskOrderCursor = new TreeDiskOrderScanCursor(
-				leafFrame);
-		RTreeOpContext diskOrderScanOpCtx = rtree.createOpContext(
-				IndexOp.DISKORDERSCAN, leafFrame, null, null);
-		rtree.diskOrderScan(diskOrderCursor, leafFrame, metaFrame,
-				diskOrderScanOpCtx);
-		try {
-			while (diskOrderCursor.hasNext()) {
-				diskOrderCursor.next();
-				ITupleReference frameTuple = diskOrderCursor.getTuple();
-				String rec = cmp.printTuple(frameTuple, recDescSers);
-				print(rec + "\n");
-			}
-		} catch (Exception e) {
-			e.printStackTrace();
-		} finally {
-			diskOrderCursor.close();
-		}
-
-		TreeIndexStatsGatherer statsGatherer = new TreeIndexStatsGatherer(
-				bufferCache, freePageManager, fileId, rtree.getRootPageId());
-		TreeIndexStats stats = statsGatherer.gatherStats(leafFrame,
-				interiorFrame, metaFrame);
-		String string = stats.toString();
-		System.err.println(string);
-
-		rtree.close();
-		bufferCache.closeFile(fileId);
-		bufferCache.close();
-
-	}
-
-	// create an R-tree of two dimensions
 	// fill the R-tree with random values using insertions
 	// and then delete all the tuples, which results in an empty R-tree
 	@Test
-	public void test03() throws Exception {
+	public void test02() throws Exception {
 
 		TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES,
 				MAX_OPEN_FILES);
@@ -467,24 +298,24 @@
 		typeTraits[5] = new TypeTrait(4);
 		typeTraits[6] = new TypeTrait(8);
 
-		MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+		// declare value providers
+		IPrimitiveValueProvider[] valueProviders = new IPrimitiveValueProvider[keyFieldCount];
+		valueProviders[0] = DoublePrimitiveValueProviderFactory.INSTANCE
+				.createPrimitiveValueProvider();
+		valueProviders[1] = valueProviders[0];
+		valueProviders[2] = valueProviders[0];
+		valueProviders[3] = valueProviders[0];
+
+		MultiComparator cmp = new MultiComparator(typeTraits, cmps,
+				valueProviders);
 
 		RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(
 				typeTraits);
 
-		@SuppressWarnings("rawtypes")
-		ISerializerDeserializer[] recDescSers = {
-				DoubleSerializerDeserializer.INSTANCE,
-				DoubleSerializerDeserializer.INSTANCE,
-				DoubleSerializerDeserializer.INSTANCE,
-				DoubleSerializerDeserializer.INSTANCE,
-				DoubleSerializerDeserializer.INSTANCE,
-				IntegerSerializerDeserializer.INSTANCE,
-				DoubleSerializerDeserializer.INSTANCE };
 		ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(
-				tupleWriterFactory, recDescSers, keyFieldCount);
+				tupleWriterFactory, keyFieldCount);
 		ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(
-				tupleWriterFactory, recDescSers, keyFieldCount);
+				tupleWriterFactory, keyFieldCount);
 		ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
 		ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
 
@@ -504,6 +335,15 @@
 		ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
 		DataOutput dos = tb.getDataOutput();
 
+		@SuppressWarnings("rawtypes")
+		ISerializerDeserializer[] recDescSers = {
+				DoubleSerializerDeserializer.INSTANCE,
+				DoubleSerializerDeserializer.INSTANCE,
+				DoubleSerializerDeserializer.INSTANCE,
+				DoubleSerializerDeserializer.INSTANCE,
+				DoubleSerializerDeserializer.INSTANCE,
+				IntegerSerializerDeserializer.INSTANCE,
+				DoubleSerializerDeserializer.INSTANCE };
 		RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
 		IFrameTupleAccessor accessor = new FrameTupleAccessor(
 				ctx.getFrameSize(), recDesc);
@@ -645,7 +485,7 @@
 	// fill the R-tree with random values using insertions
 	// perform ordered scan
 	@Test
-	public void test04() throws Exception {
+	public void test03() throws Exception {
 
 		TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES,
 				MAX_OPEN_FILES);
@@ -682,26 +522,26 @@
 		typeTraits[7] = new TypeTrait(4);
 		typeTraits[8] = new TypeTrait(8);
 
-		MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+		// declare value providers
+		IPrimitiveValueProvider[] valueProviders = new IPrimitiveValueProvider[keyFieldCount];
+		valueProviders[0] = DoublePrimitiveValueProviderFactory.INSTANCE
+				.createPrimitiveValueProvider();
+		valueProviders[1] = valueProviders[0];
+		valueProviders[2] = valueProviders[0];
+		valueProviders[3] = valueProviders[0];
+		valueProviders[4] = valueProviders[0];
+		valueProviders[5] = valueProviders[0];
+
+		MultiComparator cmp = new MultiComparator(typeTraits, cmps,
+				valueProviders);
 
 		RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(
 				typeTraits);
 
-		@SuppressWarnings("rawtypes")
-		ISerializerDeserializer[] recDescSers = {
-				DoubleSerializerDeserializer.INSTANCE,
-				DoubleSerializerDeserializer.INSTANCE,
-				DoubleSerializerDeserializer.INSTANCE,
-				DoubleSerializerDeserializer.INSTANCE,
-				DoubleSerializerDeserializer.INSTANCE,
-				DoubleSerializerDeserializer.INSTANCE,
-				DoubleSerializerDeserializer.INSTANCE,
-				IntegerSerializerDeserializer.INSTANCE,
-				DoubleSerializerDeserializer.INSTANCE };
 		ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(
-				tupleWriterFactory, recDescSers, keyFieldCount);
+				tupleWriterFactory, keyFieldCount);
 		ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(
-				tupleWriterFactory, recDescSers, keyFieldCount);
+				tupleWriterFactory, keyFieldCount);
 		ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
 		ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
 
@@ -721,6 +561,17 @@
 		ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
 		DataOutput dos = tb.getDataOutput();
 
+		@SuppressWarnings("rawtypes")
+		ISerializerDeserializer[] recDescSers = {
+				DoubleSerializerDeserializer.INSTANCE,
+				DoubleSerializerDeserializer.INSTANCE,
+				DoubleSerializerDeserializer.INSTANCE,
+				DoubleSerializerDeserializer.INSTANCE,
+				DoubleSerializerDeserializer.INSTANCE,
+				DoubleSerializerDeserializer.INSTANCE,
+				DoubleSerializerDeserializer.INSTANCE,
+				IntegerSerializerDeserializer.INSTANCE,
+				DoubleSerializerDeserializer.INSTANCE };
 		RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
 		IFrameTupleAccessor accessor = new FrameTupleAccessor(
 				ctx.getFrameSize(), recDesc);
@@ -832,4 +683,192 @@
 		bufferCache.close();
 
 	}
+
+	// create an R-tree of two dimensions
+	// fill the R-tree with random integer key values using insertions
+	// perform disk-order scan
+	@Test
+	public void test04() throws Exception {
+
+		TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES,
+				MAX_OPEN_FILES);
+		IBufferCache bufferCache = TestStorageManagerComponentHolder
+				.getBufferCache(ctx);
+		IFileMapProvider fmp = TestStorageManagerComponentHolder
+				.getFileMapProvider(ctx);
+		FileReference file = new FileReference(new File(fileName));
+		bufferCache.createFile(file);
+		int fileId = fmp.lookupFileId(file);
+		bufferCache.openFile(fileId);
+
+		// declare keys
+		int keyFieldCount = 4;
+		IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+		cmps[0] = IntegerBinaryComparatorFactory.INSTANCE
+				.createBinaryComparator();
+		cmps[1] = cmps[0];
+		cmps[2] = cmps[0];
+		cmps[3] = cmps[0];
+
+		// declare tuple fields
+		int fieldCount = 7;
+		ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+		typeTraits[0] = new TypeTrait(4);
+		typeTraits[1] = new TypeTrait(4);
+		typeTraits[2] = new TypeTrait(4);
+		typeTraits[3] = new TypeTrait(4);
+		typeTraits[4] = new TypeTrait(8);
+		typeTraits[5] = new TypeTrait(4);
+		typeTraits[6] = new TypeTrait(8);
+
+		// declare value providers
+		IPrimitiveValueProvider[] valueProviders = new IPrimitiveValueProvider[keyFieldCount];
+		valueProviders[0] = IntegerPrimitiveValueProviderFactory.INSTANCE
+				.createPrimitiveValueProvider();
+		valueProviders[1] = valueProviders[0];
+		valueProviders[2] = valueProviders[0];
+		valueProviders[3] = valueProviders[0];
+
+		MultiComparator cmp = new MultiComparator(typeTraits, cmps,
+				valueProviders);
+
+		RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(
+				typeTraits);
+
+		ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(
+				tupleWriterFactory, keyFieldCount);
+		ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(
+				tupleWriterFactory, keyFieldCount);
+		ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+		ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
+
+		IRTreeFrame interiorFrame = (IRTreeFrame) interiorFrameFactory
+				.createFrame();
+		IRTreeFrame leafFrame = (IRTreeFrame) leafFrameFactory.createFrame();
+		IFreePageManager freePageManager = new LinkedListFreePageManager(
+				bufferCache, fileId, 0, metaFrameFactory);
+
+		RTree rtree = new RTree(bufferCache, freePageManager,
+				interiorFrameFactory, leafFrameFactory, cmp);
+		rtree.create(fileId, leafFrame, metaFrame);
+		rtree.open(fileId);
+
+		ByteBuffer hyracksFrame = ctx.allocateFrame();
+		FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+		ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+		DataOutput dos = tb.getDataOutput();
+
+		@SuppressWarnings("rawtypes")
+		ISerializerDeserializer[] recDescSers = {
+				IntegerSerializerDeserializer.INSTANCE,
+				IntegerSerializerDeserializer.INSTANCE,
+				IntegerSerializerDeserializer.INSTANCE,
+				IntegerSerializerDeserializer.INSTANCE,
+				DoubleSerializerDeserializer.INSTANCE,
+				IntegerSerializerDeserializer.INSTANCE,
+				DoubleSerializerDeserializer.INSTANCE };
+		RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+		IFrameTupleAccessor accessor = new FrameTupleAccessor(
+				ctx.getFrameSize(), recDesc);
+		accessor.reset(hyracksFrame);
+		FrameTupleReference tuple = new FrameTupleReference();
+
+		RTreeOpContext insertOpCtx = rtree.createOpContext(IndexOp.INSERT,
+				leafFrame, interiorFrame, metaFrame);
+
+		Random rnd = new Random();
+		rnd.setSeed(50);
+
+		Random rnd2 = new Random();
+		rnd2.setSeed(50);
+		for (int i = 0; i < 5000; i++) {
+
+			int p1x = rnd.nextInt();
+			int p1y = rnd.nextInt();
+			int p2x = rnd.nextInt();
+			int p2y = rnd.nextInt();
+
+			double pk1 = rnd2.nextDouble();
+			int pk2 = rnd2.nextInt();
+			double pk3 = rnd2.nextDouble();
+
+			tb.reset();
+			IntegerSerializerDeserializer.INSTANCE.serialize(
+					Math.min(p1x, p2x), dos);
+			tb.addFieldEndOffset();
+			IntegerSerializerDeserializer.INSTANCE.serialize(
+					Math.min(p1y, p2y), dos);
+			tb.addFieldEndOffset();
+			IntegerSerializerDeserializer.INSTANCE.serialize(
+					Math.max(p1x, p2x), dos);
+			tb.addFieldEndOffset();
+			IntegerSerializerDeserializer.INSTANCE.serialize(
+					Math.max(p1y, p2y), dos);
+			tb.addFieldEndOffset();
+			DoubleSerializerDeserializer.INSTANCE.serialize(pk1, dos);
+			tb.addFieldEndOffset();
+			IntegerSerializerDeserializer.INSTANCE.serialize(pk2, dos);
+			tb.addFieldEndOffset();
+			DoubleSerializerDeserializer.INSTANCE.serialize(pk3, dos);
+			tb.addFieldEndOffset();
+
+			appender.reset(hyracksFrame, true);
+			appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0,
+					tb.getSize());
+
+			tuple.reset(accessor, 0);
+
+			if (i % 1000 == 0) {
+				print("INSERTING " + i + " " + Math.min(p1x, p2x) + " "
+						+ Math.min(p1y, p2y) + " " + Math.max(p1x, p2x) + " "
+						+ Math.max(p1y, p2y) + "\n");
+			}
+
+			try {
+				rtree.insert(tuple, insertOpCtx);
+			} catch (TreeIndexException e) {
+			} catch (Exception e) {
+				e.printStackTrace();
+			}
+		}
+
+		// rtree.printTree(leafFrame, interiorFrame, recDescSers);
+		// System.err.println();
+
+		String rtreeStats = rtree.printStats();
+		print(rtreeStats);
+
+		// disk-order scan
+		print("DISK-ORDER SCAN:\n");
+		TreeDiskOrderScanCursor diskOrderCursor = new TreeDiskOrderScanCursor(
+				leafFrame);
+		RTreeOpContext diskOrderScanOpCtx = rtree.createOpContext(
+				IndexOp.DISKORDERSCAN, leafFrame, null, null);
+		rtree.diskOrderScan(diskOrderCursor, leafFrame, metaFrame,
+				diskOrderScanOpCtx);
+		try {
+			while (diskOrderCursor.hasNext()) {
+				diskOrderCursor.next();
+				ITupleReference frameTuple = diskOrderCursor.getTuple();
+				String rec = cmp.printTuple(frameTuple, recDescSers);
+				print(rec + "\n");
+			}
+		} catch (Exception e) {
+			e.printStackTrace();
+		} finally {
+			diskOrderCursor.close();
+		}
+
+		TreeIndexStatsGatherer statsGatherer = new TreeIndexStatsGatherer(
+				bufferCache, freePageManager, fileId, rtree.getRootPageId());
+		TreeIndexStats stats = statsGatherer.gatherStats(leafFrame,
+				interiorFrame, metaFrame);
+		String string = stats.toString();
+		System.err.println(string);
+
+		rtree.close();
+		bufferCache.closeFile(fileId);
+		bufferCache.close();
+
+	}
 }
\ No newline at end of file
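The substantive change in these tests is the value-provider wiring: instead of a MultiComparator built from type traits and comparators alone, each key field now also receives an IPrimitiveValueProvider (Double or Integer, matching the key type) so the r-tree can read the actual coordinate values rather than only comparing bytes. The pattern, extracted from the tests above:

    // One value provider per key field, chosen to match the key type; all key fields of an
    // MBR share the same provider instance, exactly as in the tests above.
    IPrimitiveValueProvider[] valueProviders = new IPrimitiveValueProvider[keyFieldCount];
    valueProviders[0] = DoublePrimitiveValueProviderFactory.INSTANCE.createPrimitiveValueProvider();
    for (int i = 1; i < keyFieldCount; i++) {
        valueProviders[i] = valueProviders[0];
    }
    MultiComparator cmp = new MultiComparator(typeTraits, cmps, valueProviders);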
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/SearchCursorTest.java b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/SearchCursorTest.java
index 09305be..b04afe5 100644
--- a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/SearchCursorTest.java
+++ b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/SearchCursorTest.java
@@ -43,6 +43,7 @@
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
 import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
@@ -56,6 +57,7 @@
 import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
 import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.DoublePrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
 import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeOpContext;
 import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeSearchCursor;
@@ -108,23 +110,24 @@
 		typeTraits[3] = new TypeTrait(8);
 		typeTraits[4] = new TypeTrait(4);
 
-		MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+		// declare value providers
+		IPrimitiveValueProvider[] valueProviders = new IPrimitiveValueProvider[keyFieldCount];
+		valueProviders[0] = DoublePrimitiveValueProviderFactory.INSTANCE
+				.createPrimitiveValueProvider();
+		valueProviders[1] = valueProviders[0];
+		valueProviders[2] = valueProviders[0];
+		valueProviders[3] = valueProviders[0];
+
+		MultiComparator cmp = new MultiComparator(typeTraits, cmps,
+				valueProviders);
 
 		RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(
 				typeTraits);
 
-		@SuppressWarnings("rawtypes")
-		ISerializerDeserializer[] recDescSers = {
-				DoubleSerializerDeserializer.INSTANCE,
-				DoubleSerializerDeserializer.INSTANCE,
-				DoubleSerializerDeserializer.INSTANCE,
-				DoubleSerializerDeserializer.INSTANCE,
-				IntegerSerializerDeserializer.INSTANCE };
-
 		ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(
-				tupleWriterFactory, recDescSers, keyFieldCount);
+				tupleWriterFactory, keyFieldCount);
 		ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(
-				tupleWriterFactory, recDescSers, keyFieldCount);
+				tupleWriterFactory, keyFieldCount);
 		ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
 		ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
 
@@ -145,6 +148,13 @@
 		ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
 		DataOutput dos = tb.getDataOutput();
 
+		@SuppressWarnings("rawtypes")
+		ISerializerDeserializer[] recDescSers = {
+				DoubleSerializerDeserializer.INSTANCE,
+				DoubleSerializerDeserializer.INSTANCE,
+				DoubleSerializerDeserializer.INSTANCE,
+				DoubleSerializerDeserializer.INSTANCE,
+				IntegerSerializerDeserializer.INSTANCE };
 		RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
 		IFrameTupleAccessor accessor = new FrameTupleAccessor(
 				ctx.getFrameSize(), recDesc);
@@ -271,4 +281,4 @@
 		bufferCache.closeFile(fileId);
 		bufferCache.close();
 	}
-}
+}
\ No newline at end of file
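The IPrimitiveValueProvider interface itself does not appear in this part of the diff, so the following is only a guess at what an implementation behind IntegerPrimitiveValueProviderFactory might look like; the method name and signature are assumptions, not the project's API.

    // Hypothetical sketch only: the real interface is not shown in this diff. Assumed here:
    // a single method that decodes a key field from its bytes and returns it as a double,
    // which is what the r-tree would need for MBR arithmetic over integer coordinates.
    public class IntegerValueProviderSketch {
        public double getValue(byte[] bytes, int offset) {
            int v = ((bytes[offset] & 0xff) << 24)
                  | ((bytes[offset + 1] & 0xff) << 16)
                  | ((bytes[offset + 2] & 0xff) << 8)
                  | (bytes[offset + 3] & 0xff);
            return v; // widen to double so integer and double keys share one code path
        }
    }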