Merged hyracks_btree_updates_next into this branch.

git-svn-id: https://hyracks.googlecode.com/svn/branches/hyracks_dev_next@674 123451ca-8445-de46-9d55-352943316053
diff --git a/.project b/.project
deleted file mode 100644
index cb5b36a..0000000
--- a/.project
+++ /dev/null
@@ -1,17 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>hyracks</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-admin-console/.classpath b/hyracks-admin-console/.classpath
deleted file mode 100644
index c5921a9..0000000
--- a/hyracks-admin-console/.classpath
+++ /dev/null
@@ -1,14 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/hyracks-admin-console-0.1.8-SNAPSHOT/WEB-INF/classes" path="src/main/java"/>
-	<classpathentry excluding="**" kind="src" output="target/hyracks-admin-console-0.1.8-SNAPSHOT/WEB-INF/classes" path="src/main/resources"/>
-	<classpathentry kind="src" output="target/test-classes" path="src/test/java"/>
-	<classpathentry excluding="**" kind="src" output="target/test-classes" path="src/test/resources"/>
-	<classpathentry kind="src" path="target/generated-sources/gwt"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="con" path="com.google.gwt.eclipse.core.GWT_CONTAINER"/>
-	<classpathentry kind="con" path="org.eclipse.jst.j2ee.internal.web.container"/>
-	<classpathentry kind="con" path="org.eclipse.jst.j2ee.internal.module.container"/>
-	<classpathentry kind="output" path="target/hyracks-admin-console-0.1.8-SNAPSHOT/WEB-INF/classes"/>
-</classpath>
diff --git a/hyracks-admin-console/.project b/hyracks-admin-console/.project
deleted file mode 100644
index df60f9a..0000000
--- a/hyracks-admin-console/.project
+++ /dev/null
@@ -1,53 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>hyracks-admin-console</name>
-	<comment>hyracks-admin-console project</comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.wst.jsdt.core.javascriptValidator</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.eclipse.wst.common.project.facet.core.builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.eclipse.wst.validation.validationbuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>com.google.gdt.eclipse.core.webAppProjectValidator</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>com.google.gwt.eclipse.core.gwtProjectValidator</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.eclipse.jem.workbench.JavaEMFNature</nature>
-		<nature>org.eclipse.wst.common.modulecore.ModuleCoreNature</nature>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-		<nature>org.eclipse.wst.common.project.facet.core.nature</nature>
-		<nature>org.eclipse.wst.jsdt.core.jsNature</nature>
-		<nature>com.google.gwt.eclipse.core.gwtNature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-admin-console/.settings/com.google.appengine.eclipse.core.prefs b/hyracks-admin-console/.settings/com.google.appengine.eclipse.core.prefs
deleted file mode 100644
index a60576c..0000000
--- a/hyracks-admin-console/.settings/com.google.appengine.eclipse.core.prefs
+++ /dev/null
@@ -1,3 +0,0 @@
-#Thu Jun 16 10:18:26 CEST 2011
-eclipse.preferences.version=1
-filesCopiedToWebInfLib=
diff --git a/hyracks-admin-console/.settings/com.google.gdt.eclipse.core.prefs b/hyracks-admin-console/.settings/com.google.gdt.eclipse.core.prefs
deleted file mode 100644
index 8019224..0000000
--- a/hyracks-admin-console/.settings/com.google.gdt.eclipse.core.prefs
+++ /dev/null
@@ -1,5 +0,0 @@
-#Thu Sep 02 10:55:28 CEST 2010
-eclipse.preferences.version=1
-jarsExcludedFromWebInfLib=
-warSrcDir=src/main/webapp
-warSrcDirIsOutput=true
diff --git a/hyracks-admin-console/.settings/com.google.gwt.eclipse.core.prefs b/hyracks-admin-console/.settings/com.google.gwt.eclipse.core.prefs
deleted file mode 100644
index c803c44..0000000
--- a/hyracks-admin-console/.settings/com.google.gwt.eclipse.core.prefs
+++ /dev/null
@@ -1,5 +0,0 @@
-#Thu Jun 16 11:14:17 CEST 2011
-eclipse.preferences.version=1
-entryPointModules=
-filesCopiedToWebInfLib=gwt-servlet.jar
-gwtCompileSettings=PGd3dC1jb21waWxlLXNldHRpbmdzPjxsb2ctbGV2ZWw+SU5GTzwvbG9nLWxldmVsPjxvdXRwdXQtc3R5bGU+T0JGVVNDQVRFRDwvb3V0cHV0LXN0eWxlPjxleHRyYS1hcmdzPjwhW0NEQVRBWy13YXIgc3JjL21haW4vd2ViYXBwXV0+PC9leHRyYS1hcmdzPjx2bS1hcmdzPjwhW0NEQVRBWy1YbXg1MTJtXV0+PC92bS1hcmdzPjxlbnRyeS1wb2ludC1tb2R1bGU+Y29tLmNvbXBhbnkuU29tZU1vZHVsZTwvZW50cnktcG9pbnQtbW9kdWxlPjwvZ3d0LWNvbXBpbGUtc2V0dGluZ3M+
diff --git a/hyracks-admin-console/.settings/org.eclipse.jdt.core.prefs b/hyracks-admin-console/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644
index 96e4c8c..0000000
--- a/hyracks-admin-console/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Wed Aug 24 10:20:07 PDT 2011
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
-org.eclipse.jdt.core.compiler.compliance=1.6
-org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
-org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.6
diff --git a/hyracks-admin-console/.settings/org.maven.ide.eclipse.prefs b/hyracks-admin-console/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index c74c58e..0000000
--- a/hyracks-admin-console/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Thu Sep 02 10:42:12 CEST 2010
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks-api/.classpath b/hyracks-api/.classpath
deleted file mode 100644
index 1f3c1ff..0000000
--- a/hyracks-api/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/classes" path="src/main/java"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-api/.project b/hyracks-api/.project
deleted file mode 100644
index 4ae9e54..0000000
--- a/hyracks-api/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>hyracks-api</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ITypeTrait.java b/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ITypeTrait.java
index 0f46f57..639617d 100644
--- a/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ITypeTrait.java
+++ b/hyracks-api/src/main/java/edu/uci/ics/hyracks/api/dataflow/value/ITypeTrait.java
@@ -3,6 +3,13 @@
 import java.io.Serializable;
 
 public interface ITypeTrait extends Serializable {
-	public static final int VARIABLE_LENGTH = -1;	
+	public static final int VARIABLE_LENGTH = -1;
+	
+	public static final ITypeTrait INTEGER_TYPE_TRAIT = new TypeTrait(4);
+	public static final ITypeTrait INTEGER64_TYPE_TRAIT = new TypeTrait(8);
+	public static final ITypeTrait FLOAT_TYPE_TRAIT = new TypeTrait(4);
+	public static final ITypeTrait DOUBLE_TYPE_TRAIT = new TypeTrait(8);
+	public static final ITypeTrait BOOLEAN_TYPE_TRAIT = new TypeTrait(1);
+	public static final ITypeTrait VARLEN_TYPE_TRAIT = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
 	int getStaticallyKnownDataLength();
 }
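For context, a minimal sketch of how the new shared constants might be used when declaring per-field type traits; the two-field schema below is illustrative only and not part of this change:

    ITypeTrait[] typeTraits = new ITypeTrait[2];
    typeTraits[0] = ITypeTrait.INTEGER_TYPE_TRAIT;  // fixed 4-byte field
    typeTraits[1] = ITypeTrait.VARLEN_TYPE_TRAIT;   // variable-length field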
diff --git a/hyracks-cli/.classpath b/hyracks-cli/.classpath
deleted file mode 100644
index ba0bb5a..0000000
--- a/hyracks-cli/.classpath
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/classes" path="src/main/java"/>
-	<classpathentry kind="src" path="target/generated-sources/javacc"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-cli/.project b/hyracks-cli/.project
deleted file mode 100644
index 9741f63..0000000
--- a/hyracks-cli/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>hyracks-cli</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-control-cc/.classpath b/hyracks-control-cc/.classpath
deleted file mode 100644
index fb2f7c1..0000000
--- a/hyracks-control-cc/.classpath
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/classes" path="src/main/java"/>
-	<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-control-cc/.project b/hyracks-control-cc/.project
deleted file mode 100644
index 271840b..0000000
--- a/hyracks-control-cc/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>hyracks-control-cc</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-control-cc/.settings/org.eclipse.jdt.core.prefs b/hyracks-control-cc/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644
index 375e12e..0000000
--- a/hyracks-control-cc/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,6 +0,0 @@
-#Fri May 20 19:34:07 PDT 2011
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
-org.eclipse.jdt.core.compiler.compliance=1.6
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.6
diff --git a/hyracks-control-cc/.settings/org.maven.ide.eclipse.prefs b/hyracks-control-cc/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index d783847..0000000
--- a/hyracks-control-cc/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Fri Jul 30 17:52:26 PDT 2010
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks-control-common/.classpath b/hyracks-control-common/.classpath
deleted file mode 100644
index 1f3c1ff..0000000
--- a/hyracks-control-common/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/classes" path="src/main/java"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-control-common/.project b/hyracks-control-common/.project
deleted file mode 100644
index f6c06b0..0000000
--- a/hyracks-control-common/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>hyracks-control-common</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-control-common/.settings/org.eclipse.jdt.core.prefs b/hyracks-control-common/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644
index 450f5c4..0000000
--- a/hyracks-control-common/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,6 +0,0 @@
-#Fri May 20 19:34:04 PDT 2011
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
-org.eclipse.jdt.core.compiler.compliance=1.6
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.6
diff --git a/hyracks-control-common/.settings/org.maven.ide.eclipse.prefs b/hyracks-control-common/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index 7e87666..0000000
--- a/hyracks-control-common/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Fri Jul 30 07:32:48 PDT 2010
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks-control-nc/.classpath b/hyracks-control-nc/.classpath
deleted file mode 100644
index 1f3c1ff..0000000
--- a/hyracks-control-nc/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/classes" path="src/main/java"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-control-nc/.project b/hyracks-control-nc/.project
deleted file mode 100644
index 83d4a09..0000000
--- a/hyracks-control-nc/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>hyracks-control-nc</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-dataflow-common/.classpath b/hyracks-dataflow-common/.classpath
deleted file mode 100644
index 1f3c1ff..0000000
--- a/hyracks-dataflow-common/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/classes" path="src/main/java"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-dataflow-common/.project b/hyracks-dataflow-common/.project
deleted file mode 100644
index 8fd9acf..0000000
--- a/hyracks-dataflow-common/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>hyracks-dataflow-common</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-dataflow-common/.settings/org.eclipse.jdt.core.prefs b/hyracks-dataflow-common/.settings/org.eclipse.jdt.core.prefs
index 450f5c4..e11b136 100644
--- a/hyracks-dataflow-common/.settings/org.eclipse.jdt.core.prefs
+++ b/hyracks-dataflow-common/.settings/org.eclipse.jdt.core.prefs
@@ -1,6 +1,285 @@
-#Fri May 20 19:34:04 PDT 2011
+#Tue Oct 04 21:57:46 PDT 2011
 eclipse.preferences.version=1
+org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
 org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
+org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
 org.eclipse.jdt.core.compiler.compliance=1.6
+org.eclipse.jdt.core.compiler.debug.lineNumber=generate
+org.eclipse.jdt.core.compiler.debug.localVariable=generate
+org.eclipse.jdt.core.compiler.debug.sourceFile=generate
+org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
+org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
 org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
 org.eclipse.jdt.core.compiler.source=1.6
+org.eclipse.jdt.core.formatter.align_type_members_on_columns=false
+org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression=16
+org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation=0
+org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant=48
+org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call=16
+org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation=16
+org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression=16
+org.eclipse.jdt.core.formatter.alignment_for_assignment=0
+org.eclipse.jdt.core.formatter.alignment_for_binary_expression=16
+org.eclipse.jdt.core.formatter.alignment_for_compact_if=16
+org.eclipse.jdt.core.formatter.alignment_for_conditional_expression=80
+org.eclipse.jdt.core.formatter.alignment_for_enum_constants=48
+org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer=16
+org.eclipse.jdt.core.formatter.alignment_for_method_declaration=0
+org.eclipse.jdt.core.formatter.alignment_for_multiple_fields=16
+org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation=16
+org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration=16
+org.eclipse.jdt.core.formatter.blank_lines_after_imports=1
+org.eclipse.jdt.core.formatter.blank_lines_after_package=1
+org.eclipse.jdt.core.formatter.blank_lines_before_field=0
+org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration=0
+org.eclipse.jdt.core.formatter.blank_lines_before_imports=1
+org.eclipse.jdt.core.formatter.blank_lines_before_member_type=1
+org.eclipse.jdt.core.formatter.blank_lines_before_method=1
+org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk=1
+org.eclipse.jdt.core.formatter.blank_lines_before_package=0
+org.eclipse.jdt.core.formatter.blank_lines_between_import_groups=1
+org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations=1
+org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_array_initializer=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_block=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_block_in_case=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_enum_constant=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_method_declaration=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_switch=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_type_declaration=end_of_line
+org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment=false
+org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment=false
+org.eclipse.jdt.core.formatter.comment.format_block_comments=true
+org.eclipse.jdt.core.formatter.comment.format_header=false
+org.eclipse.jdt.core.formatter.comment.format_html=true
+org.eclipse.jdt.core.formatter.comment.format_javadoc_comments=true
+org.eclipse.jdt.core.formatter.comment.format_line_comments=true
+org.eclipse.jdt.core.formatter.comment.format_source_code=true
+org.eclipse.jdt.core.formatter.comment.indent_parameter_description=true
+org.eclipse.jdt.core.formatter.comment.indent_root_tags=true
+org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags=insert
+org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter=insert
+org.eclipse.jdt.core.formatter.comment.line_length=80
+org.eclipse.jdt.core.formatter.comment.new_lines_at_block_boundaries=true
+org.eclipse.jdt.core.formatter.comment.new_lines_at_javadoc_boundaries=true
+org.eclipse.jdt.core.formatter.comment.preserve_white_space_between_code_and_line_comments=false
+org.eclipse.jdt.core.formatter.compact_else_if=true
+org.eclipse.jdt.core.formatter.continuation_indentation=2
+org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer=2
+org.eclipse.jdt.core.formatter.disabling_tag=@formatter\:off
+org.eclipse.jdt.core.formatter.enabling_tag=@formatter\:on
+org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line=false
+org.eclipse.jdt.core.formatter.format_line_comment_starting_on_first_column=true
+org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header=true
+org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header=true
+org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header=true
+org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header=true
+org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases=true
+org.eclipse.jdt.core.formatter.indent_empty_lines=false
+org.eclipse.jdt.core.formatter.indent_statements_compare_to_block=true
+org.eclipse.jdt.core.formatter.indent_statements_compare_to_body=true
+org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases=true
+org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch=true
+org.eclipse.jdt.core.formatter.indentation.size=4
+org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_field=insert
+org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable=insert
+org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_method=insert
+org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_package=insert
+org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_type=insert
+org.eclipse.jdt.core.formatter.insert_new_line_after_label=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration=insert
+org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration=insert
+org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block=insert
+org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant=insert
+org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration=insert
+org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body=insert
+org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter=insert
+org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator=insert
+org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_binary_operator=insert
+org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments=insert
+org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters=insert
+org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block=insert
+org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast=insert
+org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert=insert
+org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case=insert
+org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional=insert
+org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for=insert
+org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters=insert
+org.eclipse.jdt.core.formatter.insert_space_after_ellipsis=insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer=insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional=insert
+org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for=insert
+org.eclipse.jdt.core.formatter.insert_space_after_unary_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter=insert
+org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator=insert
+org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_binary_operator=insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer=insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert=insert
+org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional=insert
+org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for=insert
+org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_ellipsis=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while=insert
+org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return=insert
+org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw=insert
+org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional=insert
+org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_semicolon=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_unary_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation=do not insert
+org.eclipse.jdt.core.formatter.join_lines_in_comments=true
+org.eclipse.jdt.core.formatter.join_wrapped_lines=true
+org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line=false
+org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line=false
+org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line=false
+org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line=false
+org.eclipse.jdt.core.formatter.lineSplit=120
+org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column=false
+org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column=false
+org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body=0
+org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve=1
+org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line=true
+org.eclipse.jdt.core.formatter.tabulation.char=space
+org.eclipse.jdt.core.formatter.tabulation.size=4
+org.eclipse.jdt.core.formatter.use_on_off_tags=false
+org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations=false
+org.eclipse.jdt.core.formatter.wrap_before_binary_operator=true
+org.eclipse.jdt.core.formatter.wrap_outer_expressions_when_nested=true
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/ArrayTupleReference.java b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/ArrayTupleReference.java
new file mode 100644
index 0000000..b265283
--- /dev/null
+++ b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/comm/io/ArrayTupleReference.java
@@ -0,0 +1,41 @@
+package edu.uci.ics.hyracks.dataflow.common.comm.io;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+
+/**
+ * An ArrayTupleReference provides access to a tuple that is not serialized into
+ * a frame. It is meant to be reset directly with the field slots and tuple data
+ * provided by ArrayTupleBuilder. The purpose is to avoid copying the built tuple
+ * into a frame before being able to use it as an ITupleReference.
+ * 
+ * @author alexander.behm
+ */
+public class ArrayTupleReference implements ITupleReference {
+	private int[] fEndOffsets;
+	private byte[] tupleData;
+
+	public void reset(int[] fEndOffsets, byte[] tupleData) {
+		this.fEndOffsets = fEndOffsets;
+		this.tupleData = tupleData;
+	}
+	
+	@Override
+	public int getFieldCount() {
+		return fEndOffsets.length;
+	}
+
+	@Override
+	public byte[] getFieldData(int fIdx) {
+		return tupleData;
+	}
+
+	@Override
+	public int getFieldStart(int fIdx) {
+		return (fIdx == 0) ? 0 : fEndOffsets[fIdx - 1]; 
+	}
+
+	@Override
+	public int getFieldLength(int fIdx) {
+		return (fIdx == 0) ? fEndOffsets[0] : fEndOffsets[fIdx] - fEndOffsets[fIdx - 1];
+	}
+}
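A minimal usage sketch for the new class, assuming ArrayTupleBuilder's addField(ISerializerDeserializer, Object), getFieldEndOffsets(), and getByteArray() helpers (those helpers are not part of this diff):

    ArrayTupleBuilder tb = new ArrayTupleBuilder(2);
    tb.reset();
    tb.addField(IntegerSerializerDeserializer.INSTANCE, 42);
    tb.addField(UTF8StringSerializerDeserializer.INSTANCE, "key");

    ArrayTupleReference tuple = new ArrayTupleReference();
    // Point the reference at the builder's buffers; nothing is copied into a frame.
    tuple.reset(tb.getFieldEndOffsets(), tb.getByteArray());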
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/accessors/FrameTupleReference.java b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/accessors/FrameTupleReference.java
index ff45eba..a21d5e9 100644
--- a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/accessors/FrameTupleReference.java
+++ b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/accessors/FrameTupleReference.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package edu.uci.ics.hyracks.dataflow.common.data.accessors;
 
 import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/DoubleBinaryComparator.java b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/DoubleBinaryComparator.java
new file mode 100644
index 0000000..dde4d7b
--- /dev/null
+++ b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/DoubleBinaryComparator.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.dataflow.common.data.comparators;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+
+public class DoubleBinaryComparator implements IBinaryComparator {
+    @Override
+    public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {   
+        return Double.compare(DoubleSerializerDeserializer.getDouble(b1, s1), DoubleSerializerDeserializer
+                .getDouble(b2, s2));
+    }
+}
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/DoubleBinaryComparatorFactory.java b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/DoubleBinaryComparatorFactory.java
index 983dcb8..0782ab5 100644
--- a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/DoubleBinaryComparatorFactory.java
+++ b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/DoubleBinaryComparatorFactory.java
@@ -1,8 +1,21 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
 package edu.uci.ics.hyracks.dataflow.common.data.comparators;
 
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
 
 public class DoubleBinaryComparatorFactory implements IBinaryComparatorFactory {
     private static final long serialVersionUID = 1L;
@@ -14,12 +27,6 @@
 
     @Override
     public IBinaryComparator createBinaryComparator() {
-        return new IBinaryComparator() {
-            @Override
-            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {   
-                return Double.compare(DoubleSerializerDeserializer.getDouble(b1, s1), DoubleSerializerDeserializer
-                        .getDouble(b2, s2));
-            }
-        };
+        return new DoubleBinaryComparator();
     }
 }
\ No newline at end of file
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/FloatBinaryComparator.java b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/FloatBinaryComparator.java
new file mode 100644
index 0000000..d168e29
--- /dev/null
+++ b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/FloatBinaryComparator.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.dataflow.common.data.comparators;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.FloatSerializerDeserializer;
+
+public class FloatBinaryComparator implements IBinaryComparator {
+    @Override
+    public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+        return Float.compare(FloatSerializerDeserializer.getFloat(b1, s1), FloatSerializerDeserializer
+                .getFloat(b2, s2));
+    }
+}
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/FloatBinaryComparatorFactory.java b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/FloatBinaryComparatorFactory.java
index 46abc34..97224b2 100644
--- a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/FloatBinaryComparatorFactory.java
+++ b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/FloatBinaryComparatorFactory.java
@@ -16,7 +16,6 @@
 
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.FloatSerializerDeserializer;
 
 public class FloatBinaryComparatorFactory implements IBinaryComparatorFactory {
     private static final long serialVersionUID = 1L;
@@ -28,12 +27,6 @@
 
     @Override
     public IBinaryComparator createBinaryComparator() {
-        return new IBinaryComparator() {
-            @Override
-            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
-                return Float.compare(FloatSerializerDeserializer.getFloat(b1, s1), FloatSerializerDeserializer
-                        .getFloat(b2, s2));
-            }
-        };
+        return new FloatBinaryComparator();
     }
 }
\ No newline at end of file
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/IntegerBinaryComparator.java b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/IntegerBinaryComparator.java
new file mode 100644
index 0000000..b6c4358
--- /dev/null
+++ b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/IntegerBinaryComparator.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.dataflow.common.data.comparators;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+
+public class IntegerBinaryComparator implements IBinaryComparator {
+    @Override
+    public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+        int v1 = IntegerSerializerDeserializer.getInt(b1, s1);
+        int v2 = IntegerSerializerDeserializer.getInt(b2, s2);
+        return v1 < v2 ? -1 : (v1 > v2 ? 1 : 0);
+    }
+}
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/IntegerBinaryComparatorFactory.java b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/IntegerBinaryComparatorFactory.java
index 493d4ee..d67c825 100644
--- a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/IntegerBinaryComparatorFactory.java
+++ b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/IntegerBinaryComparatorFactory.java
@@ -16,7 +16,6 @@
 
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
 
 public class IntegerBinaryComparatorFactory implements IBinaryComparatorFactory {
     private static final long serialVersionUID = 1L;
@@ -28,13 +27,6 @@
 
     @Override
     public IBinaryComparator createBinaryComparator() {
-        return new IBinaryComparator() {
-            @Override
-            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
-                int v1 = IntegerSerializerDeserializer.getInt(b1, s1);
-                int v2 = IntegerSerializerDeserializer.getInt(b2, s2);
-                return v1 < v2 ? -1 : (v1 > v2 ? 1 : 0);
-            }
-        };
+        return new IntegerBinaryComparator();
     }
 }
\ No newline at end of file
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/UTF8StringBinaryComparator.java b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/UTF8StringBinaryComparator.java
new file mode 100644
index 0000000..31f8a86
--- /dev/null
+++ b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/UTF8StringBinaryComparator.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.dataflow.common.data.comparators;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.dataflow.common.data.util.StringUtils;
+
+public class UTF8StringBinaryComparator implements IBinaryComparator {
+    @Override
+    public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+        int utflen1 = StringUtils.getUTFLen(b1, s1);
+        int utflen2 = StringUtils.getUTFLen(b2, s2);
+
+        int c1 = 0;
+        int c2 = 0;
+
+        int s1Start = s1 + 2;
+        int s2Start = s2 + 2;
+
+        while (c1 < utflen1 && c2 < utflen2) {
+            char ch1 = StringUtils.charAt(b1, s1Start + c1);
+            char ch2 = StringUtils.charAt(b2, s2Start + c2);
+
+            if (ch1 != ch2) {
+                return ch1 - ch2;
+            }
+            c1 += StringUtils.charSize(b1, s1Start + c1);
+            c2 += StringUtils.charSize(b2, s2Start + c2);
+        }
+        return utflen1 - utflen2;
+    }
+}
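A short sketch of the byte layout this comparator expects (assuming the string fields were written DataOutput.writeUTF-style, i.e. a 2-byte length prefix followed by modified UTF-8, which is what getUTFLen and the s + 2 start offsets above imply):

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
    import edu.uci.ics.hyracks.dataflow.common.data.comparators.UTF8StringBinaryComparator;

    public class UTF8StringComparatorSketch {
        private static byte[] utf(String s) throws IOException {
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            new DataOutputStream(bos).writeUTF(s); // 2-byte length prefix + modified UTF-8
            return bos.toByteArray();
        }

        public static void main(String[] args) throws IOException {
            byte[] a = utf("apple");
            byte[] b = utf("banana");
            IBinaryComparator cmp = new UTF8StringBinaryComparator();
            // negative result: "apple" sorts before "banana"
            System.out.println(cmp.compare(a, 0, a.length, b, 0, b.length));
        }
    }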
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/UTF8StringBinaryComparatorFactory.java b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/UTF8StringBinaryComparatorFactory.java
index 439859d..2d90dcd 100644
--- a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/UTF8StringBinaryComparatorFactory.java
+++ b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/UTF8StringBinaryComparatorFactory.java
@@ -16,7 +16,6 @@
 
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.util.StringUtils;
 
 public class UTF8StringBinaryComparatorFactory implements IBinaryComparatorFactory {
     private static final long serialVersionUID = 1L;
@@ -28,30 +27,6 @@
 
     @Override
     public IBinaryComparator createBinaryComparator() {
-        return new IBinaryComparator() {
-            @Override
-            public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
-                int utflen1 = StringUtils.getUTFLen(b1, s1);
-                int utflen2 = StringUtils.getUTFLen(b2, s2);
-
-                int c1 = 0;
-                int c2 = 0;
-
-                int s1Start = s1 + 2;
-                int s2Start = s2 + 2;
-
-                while (c1 < utflen1 && c2 < utflen2) {
-                    char ch1 = StringUtils.charAt(b1, s1Start + c1);
-                    char ch2 = StringUtils.charAt(b2, s2Start + c2);
-
-                    if (ch1 != ch2) {
-                        return ch1 - ch2;
-                    }
-                    c1 += StringUtils.charSize(b1, s1Start + c1);
-                    c2 += StringUtils.charSize(b2, s2Start + c2);
-                }
-                return utflen1 - utflen2;
-            }
-        };
+        return new UTF8StringBinaryComparator();
     }
 }
\ No newline at end of file
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/util/SerdeUtils.java b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/util/SerdeUtils.java
new file mode 100644
index 0000000..d690279
--- /dev/null
+++ b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/util/SerdeUtils.java
@@ -0,0 +1,121 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.dataflow.common.util;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.DoubleBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.FloatBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.UTF8StringBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.BooleanSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.FloatSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.Integer64SerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+
+@SuppressWarnings("rawtypes")
+public class SerdeUtils {
+    public static ITypeTrait[] serdesToTypeTraits(ISerializerDeserializer[] serdes, int numSerdes) {
+        ITypeTrait[] typeTraits = new ITypeTrait[numSerdes];
+        for (int i = 0; i < numSerdes; i++) {
+            typeTraits[i] = serdeToTypeTrait(serdes[i]);
+        }
+        return typeTraits;
+    }
+
+    public static ITypeTrait serdeToTypeTrait(ISerializerDeserializer serde) {
+        if (serde instanceof IntegerSerializerDeserializer) {
+            return ITypeTrait.INTEGER_TYPE_TRAIT;
+        }
+        if (serde instanceof Integer64SerializerDeserializer) {
+            return ITypeTrait.INTEGER64_TYPE_TRAIT;
+        }
+        if (serde instanceof FloatSerializerDeserializer) {
+            return ITypeTrait.FLOAT_TYPE_TRAIT;
+        }
+        if (serde instanceof DoubleSerializerDeserializer) {
+            return ITypeTrait.DOUBLE_TYPE_TRAIT;
+        }
+        if (serde instanceof BooleanSerializerDeserializer) {
+            return ITypeTrait.BOOLEAN_TYPE_TRAIT;
+        }
+        return ITypeTrait.VARLEN_TYPE_TRAIT;
+    }
+
+    public static IBinaryComparator[] serdesToComparators(ISerializerDeserializer[] serdes, int numSerdes) {
+        IBinaryComparator[] comparators = new IBinaryComparator[numSerdes];
+        for (int i = 0; i < numSerdes; i++) {
+            comparators[i] = serdeToComparator(serdes[i]);
+        }
+        return comparators;
+    }
+
+    public static IBinaryComparator serdeToComparator(ISerializerDeserializer serde) {
+        if (serde instanceof IntegerSerializerDeserializer) {
+            return IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        }
+        if (serde instanceof Integer64SerializerDeserializer) {
+            throw new UnsupportedOperationException("Binary comparator for Integer64 not implemented.");
+        }
+        if (serde instanceof FloatSerializerDeserializer) {
+            return FloatBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        }
+        if (serde instanceof DoubleSerializerDeserializer) {
+            return DoubleBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        }
+        if (serde instanceof BooleanSerializerDeserializer) {
+            throw new UnsupportedOperationException("Binary comparator for Boolean not implemented.");
+        }
+        if (serde instanceof UTF8StringSerializerDeserializer) {
+            return UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        }
+        throw new UnsupportedOperationException("Binary comparator for " + serde.toString() + " not implemented.");
+    }
+    
+    public static IBinaryComparatorFactory[] serdesToComparatorFactories(ISerializerDeserializer[] serdes, int numSerdes) {
+        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[numSerdes];
+        for (int i = 0; i < numSerdes; i++) {
+            comparatorFactories[i] = serdeToComparatorFactory(serdes[i]);
+        }
+        return comparatorFactories;
+    }
+
+    public static IBinaryComparatorFactory serdeToComparatorFactory(ISerializerDeserializer serde) {
+        if (serde instanceof IntegerSerializerDeserializer) {
+            return IntegerBinaryComparatorFactory.INSTANCE;
+        }
+        if (serde instanceof Integer64SerializerDeserializer) {
+            throw new UnsupportedOperationException("Binary comparator factory for Integer64 not implemented.");
+        }
+        if (serde instanceof FloatSerializerDeserializer) {
+            return FloatBinaryComparatorFactory.INSTANCE;
+        }
+        if (serde instanceof DoubleSerializerDeserializer) {
+            return DoubleBinaryComparatorFactory.INSTANCE;
+        }
+        if (serde instanceof BooleanSerializerDeserializer) {
+            throw new UnsupportedOperationException("Binary comparator factory for Boolean not implemented.");
+        }
+        if (serde instanceof UTF8StringSerializerDeserializer) {
+            return UTF8StringBinaryComparatorFactory.INSTANCE;
+        }
+        throw new UnsupportedOperationException("Binary comparator factory for " + serde.toString() + " not implemented.");
+    }
+}
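A fragment-style usage sketch for the new helper, to be placed wherever the serdes are in scope (UTF8StringSerializerDeserializer.INSTANCE is assumed to be the usual singleton; everything else comes from the code above):

    ISerializerDeserializer[] serdes = new ISerializerDeserializer[] {
            IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE };
    // derive type traits and comparator factories instead of spelling them out by hand
    ITypeTrait[] typeTraits = SerdeUtils.serdesToTypeTraits(serdes, serdes.length);
    IBinaryComparatorFactory[] cmpFactories = SerdeUtils.serdesToComparatorFactories(serdes, serdes.length);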
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/util/TupleUtils.java b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/util/TupleUtils.java
new file mode 100644
index 0000000..1e9c47f
--- /dev/null
+++ b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/util/TupleUtils.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.dataflow.common.util;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+
+@SuppressWarnings("rawtypes") 
+public class TupleUtils {    
+    @SuppressWarnings("unchecked")
+    public static void createTuple(ArrayTupleBuilder tupleBuilder, ArrayTupleReference tuple, ISerializerDeserializer[] fieldSerdes, final Object... fields) throws HyracksDataException {
+        DataOutput dos = tupleBuilder.getDataOutput();
+        tupleBuilder.reset();
+        for (int i = 0; i < fields.length; i++) {  
+            fieldSerdes[i].serialize(fields[i], dos);
+            tupleBuilder.addFieldEndOffset();
+        }
+        tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
+    }
+
+    public static ITupleReference createTuple(ISerializerDeserializer[] fieldSerdes, final Object... fields) throws HyracksDataException {
+        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fields.length);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+        createTuple(tupleBuilder, tuple, fieldSerdes, fields);
+        return tuple;
+    }
+    
+    public static void createIntegerTuple(ArrayTupleBuilder tupleBuilder, ArrayTupleReference tuple,
+            final int... fields) throws HyracksDataException {
+        DataOutput dos = tupleBuilder.getDataOutput();
+        tupleBuilder.reset();
+        for (final int i : fields) {
+            IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
+            tupleBuilder.addFieldEndOffset();
+        }
+        tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
+    }
+
+    public static ITupleReference createIntegerTuple(final int... fields) throws HyracksDataException {
+        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fields.length);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+        createIntegerTuple(tupleBuilder, tuple, fields);
+        return tuple;
+    }
+    
+    public static String printTuple(ITupleReference tuple,
+            ISerializerDeserializer[] fields) throws HyracksDataException {
+        StringBuilder strBuilder = new StringBuilder();
+        int numPrintFields = Math.min(tuple.getFieldCount(), fields.length);
+        for (int i = 0; i < numPrintFields; i++) {
+            ByteArrayInputStream inStream = new ByteArrayInputStream(
+                    tuple.getFieldData(i), tuple.getFieldStart(i),
+                    tuple.getFieldLength(i));
+            DataInput dataIn = new DataInputStream(inStream);
+            Object o = fields[i].deserialize(dataIn);
+            strBuilder.append(o.toString());
+            if (i != numPrintFields - 1) {
+                strBuilder.append(" ");
+            }
+        }        
+        return strBuilder.toString();
+    }
+}
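A minimal sketch of the round trip the class enables, inside a method that declares HyracksDataException (UTF8StringSerializerDeserializer.INSTANCE is again assumed; 42 autoboxes to the Integer that IntegerSerializerDeserializer expects):

    ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] {
            IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE };
    // build a two-field tuple and print it back field by field
    ITupleReference tuple = TupleUtils.createTuple(fieldSerdes, 42, "foo");
    System.out.println(TupleUtils.printTuple(tuple, fieldSerdes));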
diff --git a/hyracks-dataflow-hadoop/.classpath b/hyracks-dataflow-hadoop/.classpath
deleted file mode 100644
index 1f3c1ff..0000000
--- a/hyracks-dataflow-hadoop/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/classes" path="src/main/java"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-dataflow-hadoop/.project b/hyracks-dataflow-hadoop/.project
deleted file mode 100644
index d6edecf..0000000
--- a/hyracks-dataflow-hadoop/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>hyracks-dataflow-hadoop</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-dataflow-std/.classpath b/hyracks-dataflow-std/.classpath
deleted file mode 100644
index 31cf404..0000000
--- a/hyracks-dataflow-std/.classpath
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/classes" path="src/main/java"/>
-	<classpathentry kind="src" output="target/test-classes" path="src/test/java"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-dataflow-std/.project b/hyracks-dataflow-std/.project
deleted file mode 100644
index 1101a65..0000000
--- a/hyracks-dataflow-std/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>hyracks-dataflow-std</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-dataflow-std/.settings/org.eclipse.jdt.core.prefs b/hyracks-dataflow-std/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644
index 450f5c4..0000000
--- a/hyracks-dataflow-std/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,6 +0,0 @@
-#Fri May 20 19:34:04 PDT 2011
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
-org.eclipse.jdt.core.compiler.compliance=1.6
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.6
diff --git a/hyracks-dataflow-std/.settings/org.maven.ide.eclipse.prefs b/hyracks-dataflow-std/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index e96df89..0000000
--- a/hyracks-dataflow-std/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Thu Jul 29 14:32:56 PDT 2010
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks-documentation/.classpath b/hyracks-documentation/.classpath
deleted file mode 100644
index 3f62785..0000000
--- a/hyracks-documentation/.classpath
+++ /dev/null
@@ -1,6 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/J2SE-1.4"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-documentation/.project b/hyracks-documentation/.project
deleted file mode 100644
index 64d4505..0000000
--- a/hyracks-documentation/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>hyracks-documentation</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-documentation/.settings/org.eclipse.jdt.core.prefs b/hyracks-documentation/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644
index f362c73..0000000
--- a/hyracks-documentation/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,6 +0,0 @@
-#Sun Aug 14 10:18:14 PDT 2011
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.4
-org.eclipse.jdt.core.compiler.compliance=1.4
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.4
diff --git a/hyracks-documentation/.settings/org.maven.ide.eclipse.prefs b/hyracks-documentation/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index 1b662c1..0000000
--- a/hyracks-documentation/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Tue Oct 19 13:07:01 PDT 2010
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks-examples/.project b/hyracks-examples/.project
deleted file mode 100644
index 7fb5a5d..0000000
--- a/hyracks-examples/.project
+++ /dev/null
@@ -1,17 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>hyracks-examples</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-examples/.settings/org.maven.ide.eclipse.prefs b/hyracks-examples/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index 1b2848c..0000000
--- a/hyracks-examples/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Sun Aug 29 19:38:09 PDT 2010
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks-examples/btree-example/.project b/hyracks-examples/btree-example/.project
deleted file mode 100644
index ef68a5a..0000000
--- a/hyracks-examples/btree-example/.project
+++ /dev/null
@@ -1,17 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>btree-example</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-examples/btree-example/.settings/org.maven.ide.eclipse.prefs b/hyracks-examples/btree-example/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index 6733f8d..0000000
--- a/hyracks-examples/btree-example/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Wed Oct 06 08:06:48 PDT 2010
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks-examples/btree-example/btreeapp/.classpath b/hyracks-examples/btree-example/btreeapp/.classpath
deleted file mode 100644
index 3f62785..0000000
--- a/hyracks-examples/btree-example/btreeapp/.classpath
+++ /dev/null
@@ -1,6 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/J2SE-1.4"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-examples/btree-example/btreeapp/.project b/hyracks-examples/btree-example/btreeapp/.project
deleted file mode 100644
index 897fb85..0000000
--- a/hyracks-examples/btree-example/btreeapp/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>btreeapp</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-examples/btree-example/btreeapp/.settings/org.eclipse.jdt.core.prefs b/hyracks-examples/btree-example/btreeapp/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644
index f362c73..0000000
--- a/hyracks-examples/btree-example/btreeapp/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,6 +0,0 @@
-#Sun Aug 14 10:18:14 PDT 2011
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.4
-org.eclipse.jdt.core.compiler.compliance=1.4
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.4
diff --git a/hyracks-examples/btree-example/btreeapp/.settings/org.maven.ide.eclipse.prefs b/hyracks-examples/btree-example/btreeapp/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index 6733f8d..0000000
--- a/hyracks-examples/btree-example/btreeapp/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Wed Oct 06 08:06:48 PDT 2010
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks-examples/btree-example/btreeapp/pom.xml b/hyracks-examples/btree-example/btreeapp/pom.xml
index ae8c3fd..d5239bd 100644
--- a/hyracks-examples/btree-example/btreeapp/pom.xml
+++ b/hyracks-examples/btree-example/btreeapp/pom.xml
@@ -11,6 +11,35 @@
   </parent>
 
   <build>
+    <pluginManagement>
+      <plugins>
+        <plugin>
+          <groupId>org.eclipse.m2e</groupId>
+          <artifactId>lifecycle-mapping</artifactId>
+          <version>1.0.0</version>
+          <configuration>
+            <lifecycleMappingMetadata>
+              <pluginExecutions>
+                <pluginExecution>
+                  <pluginExecutionFilter>
+                    <groupId>org.apache.maven.plugins</groupId>
+                    <artifactId>maven-dependency-plugin</artifactId>
+                    <versionRange>[1.0.0,)</versionRange>
+                    <goals>
+                      <goal>copy-dependencies</goal>
+                    </goals>
+                  </pluginExecutionFilter>
+                  <action>
+                    <ignore />
+                  </action>
+                </pluginExecution>
+              </pluginExecutions>
+            </lifecycleMappingMetadata>
+          </configuration>
+        </plugin>
+      </plugins>
+	</pluginManagement>
+  
     <plugins>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
diff --git a/hyracks-examples/btree-example/btreeclient/.classpath b/hyracks-examples/btree-example/btreeclient/.classpath
deleted file mode 100644
index 1f3c1ff..0000000
--- a/hyracks-examples/btree-example/btreeclient/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/classes" path="src/main/java"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-examples/btree-example/btreeclient/.project b/hyracks-examples/btree-example/btreeclient/.project
deleted file mode 100644
index 8e16966..0000000
--- a/hyracks-examples/btree-example/btreeclient/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>btreeclient</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-examples/btree-example/btreeclient/.settings/org.eclipse.jdt.core.prefs b/hyracks-examples/btree-example/btreeclient/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644
index 7cf8ad6..0000000
--- a/hyracks-examples/btree-example/btreeclient/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,264 +0,0 @@
-#Fri May 20 19:34:07 PDT 2011
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
-org.eclipse.jdt.core.compiler.compliance=1.6
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.6
-org.eclipse.jdt.core.formatter.align_type_members_on_columns=false
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression=16
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant=48
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call=16
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation=16
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression=16
-org.eclipse.jdt.core.formatter.alignment_for_assignment=0
-org.eclipse.jdt.core.formatter.alignment_for_binary_expression=16
-org.eclipse.jdt.core.formatter.alignment_for_compact_if=16
-org.eclipse.jdt.core.formatter.alignment_for_conditional_expression=80
-org.eclipse.jdt.core.formatter.alignment_for_enum_constants=48
-org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer=16
-org.eclipse.jdt.core.formatter.alignment_for_multiple_fields=16
-org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation=16
-org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration=16
-org.eclipse.jdt.core.formatter.blank_lines_after_imports=1
-org.eclipse.jdt.core.formatter.blank_lines_after_package=1
-org.eclipse.jdt.core.formatter.blank_lines_before_field=0
-org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration=0
-org.eclipse.jdt.core.formatter.blank_lines_before_imports=1
-org.eclipse.jdt.core.formatter.blank_lines_before_member_type=1
-org.eclipse.jdt.core.formatter.blank_lines_before_method=1
-org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk=1
-org.eclipse.jdt.core.formatter.blank_lines_before_package=0
-org.eclipse.jdt.core.formatter.blank_lines_between_import_groups=1
-org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations=1
-org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_array_initializer=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_block=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_block_in_case=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_enum_constant=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_method_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_switch=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_type_declaration=end_of_line
-org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment=false
-org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment=false
-org.eclipse.jdt.core.formatter.comment.format_block_comments=true
-org.eclipse.jdt.core.formatter.comment.format_header=false
-org.eclipse.jdt.core.formatter.comment.format_html=true
-org.eclipse.jdt.core.formatter.comment.format_javadoc_comments=true
-org.eclipse.jdt.core.formatter.comment.format_line_comments=true
-org.eclipse.jdt.core.formatter.comment.format_source_code=true
-org.eclipse.jdt.core.formatter.comment.indent_parameter_description=true
-org.eclipse.jdt.core.formatter.comment.indent_root_tags=true
-org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags=insert
-org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter=insert
-org.eclipse.jdt.core.formatter.comment.line_length=80
-org.eclipse.jdt.core.formatter.compact_else_if=true
-org.eclipse.jdt.core.formatter.continuation_indentation=2
-org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer=2
-org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line=false
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header=true
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header=true
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header=true
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header=true
-org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases=true
-org.eclipse.jdt.core.formatter.indent_empty_lines=false
-org.eclipse.jdt.core.formatter.indent_statements_compare_to_block=true
-org.eclipse.jdt.core.formatter.indent_statements_compare_to_body=true
-org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases=true
-org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch=true
-org.eclipse.jdt.core.formatter.indentation.size=4
-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable=insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_member=insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter=insert
-org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_binary_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_ellipsis=insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_after_unary_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter=insert
-org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_binary_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert=insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_ellipsis=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while=insert
-org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return=insert
-org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw=insert
-org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_semicolon=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_unary_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.join_lines_in_comments=true
-org.eclipse.jdt.core.formatter.join_wrapped_lines=true
-org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line=false
-org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line=false
-org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line=false
-org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line=false
-org.eclipse.jdt.core.formatter.lineSplit=120
-org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column=false
-org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column=false
-org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body=0
-org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve=1
-org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line=true
-org.eclipse.jdt.core.formatter.tabulation.char=space
-org.eclipse.jdt.core.formatter.tabulation.size=4
-org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations=false
-org.eclipse.jdt.core.formatter.wrap_before_binary_operator=true
diff --git a/hyracks-examples/btree-example/btreeclient/.settings/org.maven.ide.eclipse.prefs b/hyracks-examples/btree-example/btreeclient/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index 6733f8d..0000000
--- a/hyracks-examples/btree-example/btreeclient/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Wed Oct 06 08:06:48 PDT 2010
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/InsertPipelineExample.java b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/InsertPipelineExample.java
index 05b16c8..1b90300 100644
--- a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/InsertPipelineExample.java
+++ b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/InsertPipelineExample.java
@@ -140,6 +140,10 @@
         primaryTypeTraits[2] = new TypeTrait(4);
         primaryTypeTraits[3] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
 
+        // comparator factories for primary index
+        IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[1];
+        primaryComparatorFactories[0] = IntegerBinaryComparatorFactory.INSTANCE;
+        
         // create factories and providers for secondary B-Tree
         TypeAwareTupleWriterFactory primaryTupleWriterFactory = new TypeAwareTupleWriterFactory(primaryTypeTraits);
         ITreeIndexFrameFactory primaryInteriorFrameFactory = new BTreeNSMInteriorFrameFactory(primaryTupleWriterFactory);
@@ -149,10 +153,7 @@
         // tuple
         int[] primaryFieldPermutation = { 2, 1, 3, 4 }; // map field 2 of input
                                                         // tuple to field 0 of
-                                                        // B-Tree tuple, etc.
-        // comparator factories for primary index
-        IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[1];
-        primaryComparatorFactories[0] = IntegerBinaryComparatorFactory.INSTANCE;
+                                                        // B-Tree tuple, etc.        
         IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
 
         ITreeIndexOpHelperFactory opHelperFactory = new BTreeOpHelperFactory();
@@ -161,7 +162,7 @@
         TreeIndexInsertUpdateDeleteOperatorDescriptor primaryInsert = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
                 spec, recDesc, storageManager, treeIndexRegistryProvider, primarySplitProvider,
                 primaryInteriorFrameFactory, primaryLeafFrameFactory, primaryTypeTraits, primaryComparatorFactories,
-                null, primaryFieldPermutation, IndexOp.INSERT, opHelperFactory);
+                primaryFieldPermutation, IndexOp.INSERT, opHelperFactory);
         JobHelper.createPartitionConstraint(spec, primaryInsert, splitNCs);
 
         // prepare insertion into secondary index
@@ -171,6 +172,11 @@
         secondaryTypeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
         secondaryTypeTraits[1] = new TypeTrait(4);
 
+        // comparator factories for secondary index
+        IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[2];
+        secondaryComparatorFactories[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
+        secondaryComparatorFactories[1] = IntegerBinaryComparatorFactory.INSTANCE;
+        
         // create factories and providers for secondary B-Tree
         TypeAwareTupleWriterFactory secondaryTupleWriterFactory = new TypeAwareTupleWriterFactory(secondaryTypeTraits);
         ITreeIndexFrameFactory secondaryInteriorFrameFactory = new BTreeNSMInteriorFrameFactory(
@@ -180,17 +186,13 @@
         // the B-Tree expects its keyfields to be at the front of its input
         // tuple
         int[] secondaryFieldPermutation = { 1, 2 };
-        // comparator factories for primary index
-        IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[2];
-        secondaryComparatorFactories[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
-        secondaryComparatorFactories[1] = IntegerBinaryComparatorFactory.INSTANCE;
         IFileSplitProvider secondarySplitProvider = JobHelper.createFileSplitProvider(splitNCs,
                 options.secondaryBTreeName);
         // create operator descriptor
         TreeIndexInsertUpdateDeleteOperatorDescriptor secondaryInsert = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
                 spec, recDesc, storageManager, treeIndexRegistryProvider, secondarySplitProvider,
                 secondaryInteriorFrameFactory, secondaryLeafFrameFactory, secondaryTypeTraits,
-                secondaryComparatorFactories, null, secondaryFieldPermutation, IndexOp.INSERT, opHelperFactory);
+                secondaryComparatorFactories, secondaryFieldPermutation, IndexOp.INSERT, opHelperFactory);
         JobHelper.createPartitionConstraint(spec, secondaryInsert, splitNCs);
 
         // end the insert pipeline at this sink operator
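A hypothetical alternative, not part of this patch: since the patch also introduces SerdeUtils, the hand-built comparator factory arrays above could be derived from the field serdes instead, e.g.:

    // illustrative only; the example keeps the explicit arrays
    IBinaryComparatorFactory[] primaryComparatorFactories = SerdeUtils.serdesToComparatorFactories(
            new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE }, 1);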
diff --git a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
index 8019080..4656a88 100644
--- a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
+++ b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
@@ -159,7 +159,7 @@
         ITreeIndexOpHelperFactory opHelperFactory = new BTreeOpHelperFactory();
         TreeIndexBulkLoadOperatorDescriptor btreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
                 storageManager, treeIndexRegistryProvider, btreeSplitProvider, interiorFrameFactory, leafFrameFactory,
-                typeTraits, comparatorFactories, null, fieldPermutation, 0.7f, opHelperFactory);
+                typeTraits, comparatorFactories, fieldPermutation, 0.7f, opHelperFactory);
         JobHelper.createPartitionConstraint(spec, btreeBulkLoad, splitNCs);
 
         // distribute the records from the datagen via hashing to the bulk load
diff --git a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexEnlistFilesExample.java b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexEnlistFilesExample.java
index b75f64d..ae55700 100644
--- a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexEnlistFilesExample.java
+++ b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexEnlistFilesExample.java
@@ -98,6 +98,9 @@
         typeTraits[2] = new TypeTrait(4);
         typeTraits[3] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
 
+        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
+        comparatorFactories[0] = IntegerBinaryComparatorFactory.INSTANCE;
+        
         // create factories and providers for B-Tree
         TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
         ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
@@ -105,14 +108,11 @@
         IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider = TreeIndexRegistryProvider.INSTANCE;
         IStorageManagerInterface storageManager = StorageManagerInterface.INSTANCE;
 
-        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
-        comparatorFactories[0] = IntegerBinaryComparatorFactory.INSTANCE;
-
         IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.btreeName);
         ITreeIndexOpHelperFactory opHelperFactory = new BTreeOpHelperFactory();
         TreeIndexFileEnlistmentOperatorDescriptor fileEnlistmentOp = new TreeIndexFileEnlistmentOperatorDescriptor(
                 spec, recDesc, storageManager, treeIndexRegistryProvider, btreeSplitProvider, interiorFrameFactory,
-                leafFrameFactory, typeTraits, comparatorFactories, null, opHelperFactory);
+                leafFrameFactory, typeTraits, comparatorFactories, opHelperFactory);
         JobHelper.createPartitionConstraint(spec, fileEnlistmentOp, splitNCs);
 
         spec.addRoot(fileEnlistmentOp);
diff --git a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexSearchExample.java b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
index fbf7f9d..1d61a0f 100644
--- a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
+++ b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
@@ -101,6 +101,10 @@
         typeTraits[2] = new TypeTrait(4);
         typeTraits[3] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
 
+        // comparators for btree
+        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
+        comparatorFactories[0] = IntegerBinaryComparatorFactory.INSTANCE;
+        
         // create factories and providers for B-Tree
         TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
         ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
@@ -113,10 +117,6 @@
                 IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
                 IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE, });
 
-        // comparators for btree
-        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
-        comparatorFactories[0] = IntegerBinaryComparatorFactory.INSTANCE;
-
         // build tuple containing low and high search keys
         ArrayTupleBuilder tb = new ArrayTupleBuilder(comparatorFactories.length * 2); // high
                                                                                       // key
diff --git a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
index 21c00a1..d38568f 100644
--- a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
+++ b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
@@ -117,6 +117,11 @@
         primaryTypeTraits[2] = new TypeTrait(4);
         primaryTypeTraits[3] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
 
+        // comparators for sort fields and BTree fields
+        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[2];
+        comparatorFactories[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
+        comparatorFactories[1] = IntegerBinaryComparatorFactory.INSTANCE;
+        
         // create factories and providers for primary B-Tree
         TypeAwareTupleWriterFactory primaryTupleWriterFactory = new TypeAwareTupleWriterFactory(primaryTypeTraits);
         ITreeIndexFrameFactory primaryInteriorFrameFactory = new BTreeNSMInteriorFrameFactory(primaryTupleWriterFactory);
@@ -133,10 +138,6 @@
         // sort the tuples as preparation for bulk load into secondary index
         // fields to sort on
         int[] sortFields = { 1, 0 };
-        // comparators for sort fields
-        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[2];
-        comparatorFactories[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
-        comparatorFactories[1] = IntegerBinaryComparatorFactory.INSTANCE;
         ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, options.sbSize, sortFields,
                 comparatorFactories, recDesc);
         JobHelper.createPartitionConstraint(spec, sorter, splitNCs);
@@ -159,7 +160,7 @@
         IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.secondaryBTreeName);
         TreeIndexBulkLoadOperatorDescriptor btreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
                 storageManager, treeIndexRegistryProvider, btreeSplitProvider, secondaryInteriorFrameFactory,
-                secondaryLeafFrameFactory, secondaryTypeTraits, comparatorFactories, null, fieldPermutation, 0.7f,
+                secondaryLeafFrameFactory, secondaryTypeTraits, comparatorFactories, fieldPermutation, 0.7f,
                 opHelperFactory);
         JobHelper.createPartitionConstraint(spec, btreeBulkLoad, splitNCs);
 
diff --git a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexSearchExample.java b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
index 5eec60c..9b5d6fa 100644
--- a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
+++ b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
@@ -30,6 +30,7 @@
 import edu.uci.ics.hyracks.api.job.JobId;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
 import edu.uci.ics.hyracks.dataflow.common.data.comparators.UTF8StringBinaryComparatorFactory;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
@@ -109,6 +110,15 @@
         secondaryTypeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
         secondaryTypeTraits[1] = new TypeTrait(4);
 
+        // comparators for the secondary B-Tree fields
+        IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[2];
+        secondaryComparatorFactories[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
+        secondaryComparatorFactories[1] = IntegerBinaryComparatorFactory.INSTANCE;
+        
+        // comparators for primary index
+        IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[1];
+        primaryComparatorFactories[0] = IntegerBinaryComparatorFactory.INSTANCE;
+        
         // create factories and providers for secondary B-Tree
         TypeAwareTupleWriterFactory secondaryTupleWriterFactory = new TypeAwareTupleWriterFactory(secondaryTypeTraits);
         ITreeIndexFrameFactory secondaryInteriorFrameFactory = new BTreeNSMInteriorFrameFactory(
@@ -136,11 +146,11 @@
         // non-unique key
         // i.e. we will have a range condition on the first field only (implying
         // [-infinity, +infinity] for the second field)
-        IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
-        comparatorFactories[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
-
+        IBinaryComparatorFactory[] searchComparatorFactories = new IBinaryComparatorFactory[1];
+        searchComparatorFactories[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
+        
         // build tuple containing low and high search keys
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(comparatorFactories.length * 2); // low
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(searchComparatorFactories.length * 2); // low
                                                                                       // and
                                                                                       // high
                                                                                       // key
@@ -174,7 +184,7 @@
         ITreeIndexOpHelperFactory opHelperFactory = new BTreeOpHelperFactory();
         BTreeSearchOperatorDescriptor secondarySearchOp = new BTreeSearchOperatorDescriptor(spec, secondaryRecDesc,
                 storageManager, btreeRegistryProvider, secondarySplitProvider, secondaryInteriorFrameFactory,
-                secondaryLeafFrameFactory, secondaryTypeTraits, comparatorFactories, true, secondaryLowKeyFields,
+                secondaryLeafFrameFactory, secondaryTypeTraits, searchComparatorFactories, true, secondaryLowKeyFields,
                 secondaryHighKeyFields, true, true, opHelperFactory);
         JobHelper.createPartitionConstraint(spec, secondarySearchOp, splitNCs);
 
@@ -190,7 +200,7 @@
         IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
         BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
                 storageManager, btreeRegistryProvider, primarySplitProvider, primaryInteriorFrameFactory,
-                primaryLeafFrameFactory, primaryTypeTraits, comparatorFactories, true, primaryLowKeyFields,
+                primaryLeafFrameFactory, primaryTypeTraits, primaryComparatorFactories, true, primaryLowKeyFields,
                 primaryHighKeyFields, true, true, opHelperFactory);
         JobHelper.createPartitionConstraint(spec, primarySearchOp, splitNCs);
 
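// ---------------------------------------------------------------------------
// Editor's sketch (not part of the patch): the btree-example hunks above all
// follow one pattern -- the comparator-factory array for an index is declared
// before the frame factories and operator descriptors that consume it, and the
// extra constructor argument that was always passed as null has been dropped.
// Minimal illustration of the comparator setup only; the package of
// IBinaryComparatorFactory is an assumption, the two concrete factories are
// imported exactly as elsewhere in this diff, and the array shapes mirror the
// added lines above.
import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
import edu.uci.ics.hyracks.dataflow.common.data.comparators.UTF8StringBinaryComparatorFactory;

class SecondaryKeyComparatorSketch {
    // Secondary B-Tree key: <UTF8 string secondary key, int primary key>.
    static IBinaryComparatorFactory[] secondaryComparatorFactories() {
        IBinaryComparatorFactory[] cmps = new IBinaryComparatorFactory[2];
        cmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
        cmps[1] = IntegerBinaryComparatorFactory.INSTANCE;
        return cmps;
    }

    // Range search on the string field only, i.e. the single-entry
    // searchComparatorFactories array introduced in SecondaryIndexSearchExample.
    static IBinaryComparatorFactory[] searchComparatorFactories() {
        return new IBinaryComparatorFactory[] { UTF8StringBinaryComparatorFactory.INSTANCE };
    }
}
// ---------------------------------------------------------------------------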
diff --git a/hyracks-examples/btree-example/btreehelper/.classpath b/hyracks-examples/btree-example/btreehelper/.classpath
deleted file mode 100644
index fb2f7c1..0000000
--- a/hyracks-examples/btree-example/btreehelper/.classpath
+++ /dev/null
@@ -1,8 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/classes" path="src/main/java"/>
-	<classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-examples/btree-example/btreehelper/.project b/hyracks-examples/btree-example/btreehelper/.project
deleted file mode 100644
index c65470b..0000000
--- a/hyracks-examples/btree-example/btreehelper/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>btreehelper</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-examples/btree-example/btreehelper/.settings/org.eclipse.jdt.core.prefs b/hyracks-examples/btree-example/btreehelper/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644
index dfac000..0000000
--- a/hyracks-examples/btree-example/btreehelper/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,6 +0,0 @@
-#Fri May 20 19:34:05 PDT 2011
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
-org.eclipse.jdt.core.compiler.compliance=1.6
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.6
diff --git a/hyracks-examples/btree-example/btreehelper/.settings/org.maven.ide.eclipse.prefs b/hyracks-examples/btree-example/btreehelper/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index 6733f8d..0000000
--- a/hyracks-examples/btree-example/btreehelper/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Wed Oct 06 08:06:48 PDT 2010
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks-examples/hadoop-compat-example/.project b/hyracks-examples/hadoop-compat-example/.project
deleted file mode 100644
index 4e057cd..0000000
--- a/hyracks-examples/hadoop-compat-example/.project
+++ /dev/null
@@ -1,17 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>text-example</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-examples/hadoop-compat-example/.settings/org.maven.ide.eclipse.prefs b/hyracks-examples/hadoop-compat-example/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index 4562b1a..0000000
--- a/hyracks-examples/hadoop-compat-example/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Tue Sep 28 14:37:42 PDT 2010
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks-examples/hadoop-compat-example/hadoopcompatapp/.classpath b/hyracks-examples/hadoop-compat-example/hadoopcompatapp/.classpath
deleted file mode 100644
index f2cc5f7..0000000
--- a/hyracks-examples/hadoop-compat-example/hadoopcompatapp/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/test-classes" path="src/test/java"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-examples/hadoop-compat-example/hadoopcompatapp/.project b/hyracks-examples/hadoop-compat-example/hadoopcompatapp/.project
deleted file mode 100644
index 4f3af14..0000000
--- a/hyracks-examples/hadoop-compat-example/hadoopcompatapp/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>textapp</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-examples/hadoop-compat-example/hadoopcompatapp/.settings/org.eclipse.jdt.core.prefs b/hyracks-examples/hadoop-compat-example/hadoopcompatapp/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644
index 692202d..0000000
--- a/hyracks-examples/hadoop-compat-example/hadoopcompatapp/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,13 +0,0 @@
-#Thu May 19 22:55:12 PDT 2011
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
-org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
-org.eclipse.jdt.core.compiler.compliance=1.6
-org.eclipse.jdt.core.compiler.debug.lineNumber=generate
-org.eclipse.jdt.core.compiler.debug.localVariable=generate
-org.eclipse.jdt.core.compiler.debug.sourceFile=generate
-org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
-org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.6
diff --git a/hyracks-examples/hadoop-compat-example/hadoopcompatapp/.settings/org.maven.ide.eclipse.prefs b/hyracks-examples/hadoop-compat-example/hadoopcompatapp/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index 4562b1a..0000000
--- a/hyracks-examples/hadoop-compat-example/hadoopcompatapp/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Tue Sep 28 14:37:42 PDT 2010
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks-examples/hadoop-compat-example/hadoopcompatapp/pom.xml b/hyracks-examples/hadoop-compat-example/hadoopcompatapp/pom.xml
index 31b0d23..15d31b6 100644
--- a/hyracks-examples/hadoop-compat-example/hadoopcompatapp/pom.xml
+++ b/hyracks-examples/hadoop-compat-example/hadoopcompatapp/pom.xml
@@ -11,6 +11,35 @@
   </parent>
 
   <build>
+    <pluginManagement>
+      <plugins>
+        <plugin>
+          <groupId>org.eclipse.m2e</groupId>
+          <artifactId>lifecycle-mapping</artifactId>
+          <version>1.0.0</version>
+          <configuration>
+            <lifecycleMappingMetadata>
+              <pluginExecutions>
+                <pluginExecution>
+                  <pluginExecutionFilter>
+                    <groupId>org.apache.maven.plugins</groupId>
+                    <artifactId>maven-dependency-plugin</artifactId>
+                    <versionRange>[1.0.0,)</versionRange>
+                    <goals>
+                      <goal>copy-dependencies</goal>
+                    </goals>
+                  </pluginExecutionFilter>
+                  <action>
+                    <ignore />
+                  </action>
+                </pluginExecution>
+              </pluginExecutions>
+            </lifecycleMappingMetadata>
+          </configuration>
+        </plugin>
+      </plugins>
+    </pluginManagement>
+
     <plugins>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
diff --git a/hyracks-examples/hadoop-compat-example/hadoopcompatapp/src/test/java/.classpath b/hyracks-examples/hadoop-compat-example/hadoopcompatapp/src/test/java/.classpath
deleted file mode 100644
index ed53773..0000000
--- a/hyracks-examples/hadoop-compat-example/hadoopcompatapp/src/test/java/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" path="src"/>
-	<classpathentry kind="src" path="compat-client"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
-	<classpathentry kind="output" path="bin"/>
-</classpath>
diff --git a/hyracks-examples/hadoop-compat-example/hadoopcompatapp/src/test/java/.project b/hyracks-examples/hadoop-compat-example/hadoopcompatapp/src/test/java/.project
deleted file mode 100644
index 7054f52..0000000
--- a/hyracks-examples/hadoop-compat-example/hadoopcompatapp/src/test/java/.project
+++ /dev/null
@@ -1,17 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>compat-app</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-examples/hadoop-compat-example/hadoopcompatclient/.classpath b/hyracks-examples/hadoop-compat-example/hadoopcompatclient/.classpath
deleted file mode 100644
index 1f3c1ff..0000000
--- a/hyracks-examples/hadoop-compat-example/hadoopcompatclient/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/classes" path="src/main/java"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-examples/hadoop-compat-example/hadoopcompatclient/.project b/hyracks-examples/hadoop-compat-example/hadoopcompatclient/.project
deleted file mode 100644
index 04307d3..0000000
--- a/hyracks-examples/hadoop-compat-example/hadoopcompatclient/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>textclient</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-examples/hadoop-compat-example/hadoopcompatclient/.settings/org.eclipse.jdt.core.prefs b/hyracks-examples/hadoop-compat-example/hadoopcompatclient/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644
index 8599738..0000000
--- a/hyracks-examples/hadoop-compat-example/hadoopcompatclient/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,6 +0,0 @@
-#Tue Sep 28 14:37:42 PDT 2010
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
-org.eclipse.jdt.core.compiler.compliance=1.6
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.6
diff --git a/hyracks-examples/hadoop-compat-example/hadoopcompatclient/.settings/org.maven.ide.eclipse.prefs b/hyracks-examples/hadoop-compat-example/hadoopcompatclient/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index 4562b1a..0000000
--- a/hyracks-examples/hadoop-compat-example/hadoopcompatclient/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Tue Sep 28 14:37:42 PDT 2010
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks-examples/hadoop-compat-example/hadoopcompathelper/.classpath b/hyracks-examples/hadoop-compat-example/hadoopcompathelper/.classpath
deleted file mode 100644
index 1f3c1ff..0000000
--- a/hyracks-examples/hadoop-compat-example/hadoopcompathelper/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/classes" path="src/main/java"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-examples/hadoop-compat-example/hadoopcompathelper/.project b/hyracks-examples/hadoop-compat-example/hadoopcompathelper/.project
deleted file mode 100644
index 19ce234..0000000
--- a/hyracks-examples/hadoop-compat-example/hadoopcompathelper/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>texthelper</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-examples/hadoop-compat-example/hadoopcompathelper/.settings/org.eclipse.jdt.core.prefs b/hyracks-examples/hadoop-compat-example/hadoopcompathelper/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644
index 8599738..0000000
--- a/hyracks-examples/hadoop-compat-example/hadoopcompathelper/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,6 +0,0 @@
-#Tue Sep 28 14:37:42 PDT 2010
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
-org.eclipse.jdt.core.compiler.compliance=1.6
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.6
diff --git a/hyracks-examples/hadoop-compat-example/hadoopcompathelper/.settings/org.maven.ide.eclipse.prefs b/hyracks-examples/hadoop-compat-example/hadoopcompathelper/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index 4562b1a..0000000
--- a/hyracks-examples/hadoop-compat-example/hadoopcompathelper/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Tue Sep 28 14:37:42 PDT 2010
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks-examples/hyracks-integration-tests/.classpath b/hyracks-examples/hyracks-integration-tests/.classpath
deleted file mode 100644
index f2cc5f7..0000000
--- a/hyracks-examples/hyracks-integration-tests/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/test-classes" path="src/test/java"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-examples/hyracks-integration-tests/.project b/hyracks-examples/hyracks-integration-tests/.project
deleted file mode 100644
index 91bbbe6..0000000
--- a/hyracks-examples/hyracks-integration-tests/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>hyracks-integration-tests</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-examples/hyracks-integration-tests/.settings/org.eclipse.jdt.core.prefs b/hyracks-examples/hyracks-integration-tests/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644
index a80ec7b..0000000
--- a/hyracks-examples/hyracks-integration-tests/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,6 +0,0 @@
-#Fri May 20 19:34:08 PDT 2011
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
-org.eclipse.jdt.core.compiler.compliance=1.6
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.6
diff --git a/hyracks-examples/hyracks-integration-tests/.settings/org.maven.ide.eclipse.prefs b/hyracks-examples/hyracks-integration-tests/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index 6b45873..0000000
--- a/hyracks-examples/hyracks-integration-tests/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Thu Jul 29 15:30:15 PDT 2010
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexScanOperatorTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexScanOperatorTest.java
index ba7621d..f320a48 100644
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexScanOperatorTest.java
+++ b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexScanOperatorTest.java
@@ -168,7 +168,7 @@
 				spec, storageManager, treeIndexRegistryProvider,
 				primaryBtreeSplitProvider, primaryInteriorFrameFactory,
 				primaryLeafFrameFactory, primaryTypeTraits,
-				primaryComparatorFactories, null, fieldPermutation, 0.7f,
+				primaryComparatorFactories, fieldPermutation, 0.7f,
 				opHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				primaryBtreeBulkLoad, NC1_ID);
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexSearchOperatorTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexSearchOperatorTest.java
index c64424d..ae5c04e 100644
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexSearchOperatorTest.java
+++ b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexSearchOperatorTest.java
@@ -169,7 +169,7 @@
 				spec, storageManager, treeIndexRegistryProvider,
 				primaryBtreeSplitProvider, primaryInteriorFrameFactory,
 				primaryLeafFrameFactory, primaryTypeTraits,
-				primaryComparatorFactories, null, fieldPermutation, 0.7f,
+				primaryComparatorFactories, fieldPermutation, 0.7f,
 				opHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				primaryBtreeBulkLoad, NC1_ID);
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexStatsOperatorTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexStatsOperatorTest.java
index e8fd60b..e685894 100644
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexStatsOperatorTest.java
+++ b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreePrimaryIndexStatsOperatorTest.java
@@ -164,7 +164,7 @@
 				spec, storageManager, treeIndexRegistryProvider,
 				primaryBtreeSplitProvider, primaryInteriorFrameFactory,
 				primaryLeafFrameFactory, primaryTypeTraits,
-				primaryComparatorFactories, null, fieldPermutation, 0.7f,
+				primaryComparatorFactories, fieldPermutation, 0.7f,
 				opHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				primaryBtreeBulkLoad, NC1_ID);
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexInsertOperatorTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexInsertOperatorTest.java
index 5c527cb..9d21dd5 100644
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexInsertOperatorTest.java
+++ b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexInsertOperatorTest.java
@@ -206,7 +206,7 @@
 				spec, storageManager, treeIndexRegistryProvider,
 				primaryBtreeSplitProvider, primaryInteriorFrameFactory,
 				primaryLeafFrameFactory, primaryTypeTraits,
-				primaryComparatorFactories, null, fieldPermutation, 0.7f,
+				primaryComparatorFactories, fieldPermutation, 0.7f,
 				opHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				primaryBtreeBulkLoad, NC1_ID);
@@ -272,7 +272,7 @@
 				spec, storageManager, treeIndexRegistryProvider,
 				secondaryBtreeSplitProvider, secondaryInteriorFrameFactory,
 				secondaryLeafFrameFactory, secondaryTypeTraits,
-				secondaryComparatorFactories, null, fieldPermutation, 0.7f,
+				secondaryComparatorFactories, fieldPermutation, 0.7f,
 				opHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				secondaryBtreeBulkLoad, NC1_ID);
@@ -330,7 +330,7 @@
 				spec, ordersDesc, storageManager, treeIndexRegistryProvider,
 				primaryBtreeSplitProvider, primaryInteriorFrameFactory,
 				primaryLeafFrameFactory, primaryTypeTraits,
-				primaryComparatorFactories, null, primaryFieldPermutation,
+				primaryComparatorFactories, primaryFieldPermutation,
 				IndexOp.INSERT, opHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				primaryBtreeInsertOp, NC1_ID);
@@ -341,7 +341,7 @@
 				spec, ordersDesc, storageManager, treeIndexRegistryProvider,
 				secondaryBtreeSplitProvider, secondaryInteriorFrameFactory,
 				secondaryLeafFrameFactory, secondaryTypeTraits,
-				secondaryComparatorFactories, null, fieldPermutationB,
+				secondaryComparatorFactories, fieldPermutationB,
 				IndexOp.INSERT, opHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				secondaryInsertOp, NC1_ID);
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexSearchOperatorTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexSearchOperatorTest.java
index 60c3b13..89595d6 100644
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexSearchOperatorTest.java
+++ b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeSecondaryIndexSearchOperatorTest.java
@@ -202,7 +202,7 @@
 				spec, storageManager, treeIndexRegistryProvider,
 				primaryBtreeSplitProvider, primaryInteriorFrameFactory,
 				primaryLeafFrameFactory, primaryTypeTraits,
-				primaryComparatorFactories, null, fieldPermutation, 0.7f,
+				primaryComparatorFactories, fieldPermutation, 0.7f,
 				opHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				primaryBtreeBulkLoad, NC1_ID);
@@ -268,7 +268,7 @@
 				spec, storageManager, treeIndexRegistryProvider,
 				secondaryBtreeSplitProvider, secondaryInteriorFrameFactory,
 				secondaryLeafFrameFactory, secondaryTypeTraits,
-				secondaryComparatorFactories, null, fieldPermutation, 0.7f,
+				secondaryComparatorFactories, fieldPermutation, 0.7f,
 				opHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				secondaryBtreeBulkLoad, NC1_ID);
@@ -361,7 +361,6 @@
 	public static void cleanup() throws Exception {
 		File primary = new File(primaryFileName);
 		primary.deleteOnExit();
-
 		File secondary = new File(secondaryFileName);
 		secondary.deleteOnExit();
 	}
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexSearchOperatorTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexSearchOperatorTest.java
index 1923c10..ec2d5dd 100644
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexSearchOperatorTest.java
+++ b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexSearchOperatorTest.java
@@ -57,8 +57,8 @@
 import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeSearchOperatorDescriptor;
 import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.DoublePrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
@@ -84,7 +84,6 @@
 	private int primaryKeyFieldCount = 4;
 	private ITypeTrait[] primaryTypeTraits = new ITypeTrait[primaryFieldCount];
 	private IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
-	private IPrimitiveValueProviderFactory[] primaryValueProviderFactories = new IPrimitiveValueProviderFactory[primaryKeyFieldCount];
 
 	private RTreeTypeAwareTupleWriterFactory primaryTupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(
 			primaryTypeTraits);
@@ -97,10 +96,8 @@
 					DoubleSerializerDeserializer.INSTANCE,
 					UTF8StringSerializerDeserializer.INSTANCE });
 
-	private ITreeIndexFrameFactory primaryInteriorFrameFactory = new RTreeNSMInteriorFrameFactory(
-			primaryTupleWriterFactory, primaryKeyFieldCount);
-	private ITreeIndexFrameFactory primaryLeafFrameFactory = new RTreeNSMLeafFrameFactory(
-			primaryTupleWriterFactory, primaryKeyFieldCount);
+	private ITreeIndexFrameFactory primaryInteriorFrameFactory;
+	private ITreeIndexFrameFactory primaryLeafFrameFactory;
 
 	private static String primaryRTreeName = "primary"
 			+ simpleDateFormat.format(new Date());
@@ -123,11 +120,15 @@
 		primaryComparatorFactories[1] = primaryComparatorFactories[0];
 		primaryComparatorFactories[2] = primaryComparatorFactories[0];
 		primaryComparatorFactories[3] = primaryComparatorFactories[0];
-		primaryValueProviderFactories[0] = DoublePrimitiveValueProviderFactory.INSTANCE;
-		primaryValueProviderFactories[1] = primaryValueProviderFactories[0];
-		primaryValueProviderFactories[2] = primaryValueProviderFactories[0];
-		primaryValueProviderFactories[3] = primaryValueProviderFactories[0];
-
+		
+		IPrimitiveValueProviderFactory[] primaryValueProviderFactories = RTreeUtils
+				.comparatorFactoriesToPrimitiveValueProviderFactories(primaryComparatorFactories);
+		
+		primaryInteriorFrameFactory = new RTreeNSMInteriorFrameFactory(
+				primaryTupleWriterFactory, primaryValueProviderFactories);
+		primaryLeafFrameFactory = new RTreeNSMLeafFrameFactory(
+				primaryTupleWriterFactory, primaryValueProviderFactories);
+		
 		loadPrimaryIndexTest();
 	}
 
@@ -162,7 +163,7 @@
 				spec, storageManager, treeIndexRegistryProvider,
 				primaryRTreeSplitProvider, primaryInteriorFrameFactory,
 				primaryLeafFrameFactory, primaryTypeTraits,
-				primaryComparatorFactories, primaryValueProviderFactories,
+				primaryComparatorFactories,
 				fieldPermutation, 0.7f, opHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				primaryRTreeBulkLoad, NC1_ID);
@@ -212,7 +213,7 @@
 				treeIndexRegistryProvider, primaryRTreeSplitProvider,
 				primaryInteriorFrameFactory, primaryLeafFrameFactory,
 				primaryTypeTraits, primaryComparatorFactories,
-				primaryValueProviderFactories, keyFields, opHelperFactory);
+				keyFields, opHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				primaryRTreeSearchOp, NC1_ID);
 
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexStatsOperatorTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexStatsOperatorTest.java
index b6b4e20..1b1d760 100644
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexStatsOperatorTest.java
+++ b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreePrimaryIndexStatsOperatorTest.java
@@ -53,8 +53,8 @@
 import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeOpHelperFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.DoublePrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
@@ -79,7 +79,6 @@
 	private int primaryKeyFieldCount = 4;
 	private ITypeTrait[] primaryTypeTraits = new ITypeTrait[primaryFieldCount];
 	private IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
-	private IPrimitiveValueProviderFactory[] primaryValueProviderFactories = new IPrimitiveValueProviderFactory[primaryKeyFieldCount];
 
 	private RTreeTypeAwareTupleWriterFactory primaryTupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(
 			primaryTypeTraits);
@@ -92,10 +91,8 @@
 					DoubleSerializerDeserializer.INSTANCE,
 					UTF8StringSerializerDeserializer.INSTANCE });
 
-	private ITreeIndexFrameFactory primaryInteriorFrameFactory = new RTreeNSMInteriorFrameFactory(
-			primaryTupleWriterFactory, primaryKeyFieldCount);
-	private ITreeIndexFrameFactory primaryLeafFrameFactory = new RTreeNSMLeafFrameFactory(
-			primaryTupleWriterFactory, primaryKeyFieldCount);
+	private ITreeIndexFrameFactory primaryInteriorFrameFactory;
+	private ITreeIndexFrameFactory primaryLeafFrameFactory;
 
 	private static String primaryRTreeName = "primary"
 			+ simpleDateFormat.format(new Date());
@@ -118,11 +115,15 @@
 		primaryComparatorFactories[1] = primaryComparatorFactories[0];
 		primaryComparatorFactories[2] = primaryComparatorFactories[0];
 		primaryComparatorFactories[3] = primaryComparatorFactories[0];
-		primaryValueProviderFactories[0] = DoublePrimitiveValueProviderFactory.INSTANCE;
-		primaryValueProviderFactories[1] = primaryValueProviderFactories[0];
-		primaryValueProviderFactories[2] = primaryValueProviderFactories[0];
-		primaryValueProviderFactories[3] = primaryValueProviderFactories[0];
-
+		
+		IPrimitiveValueProviderFactory[] primaryValueProviderFactories = RTreeUtils
+				.comparatorFactoriesToPrimitiveValueProviderFactories(primaryComparatorFactories);
+		
+		primaryInteriorFrameFactory = new RTreeNSMInteriorFrameFactory(
+				primaryTupleWriterFactory, primaryValueProviderFactories);
+		primaryLeafFrameFactory = new RTreeNSMLeafFrameFactory(
+				primaryTupleWriterFactory, primaryValueProviderFactories);
+		
 		loadPrimaryIndexTest();
 	}
 
@@ -157,7 +158,7 @@
 				spec, storageManager, treeIndexRegistryProvider,
 				primaryRTreeSplitProvider, primaryInteriorFrameFactory,
 				primaryLeafFrameFactory, primaryTypeTraits,
-				primaryComparatorFactories, primaryValueProviderFactories,
+				primaryComparatorFactories,
 				fieldPermutation, 0.7f, opHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				primaryRTreeBulkLoad, NC1_ID);
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreeSecondaryIndexSearchOperatorTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreeSecondaryIndexSearchOperatorTest.java
index cc92e97..27797a6 100644
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreeSecondaryIndexSearchOperatorTest.java
+++ b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreeSecondaryIndexSearchOperatorTest.java
@@ -66,6 +66,7 @@
 import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.impls.DoublePrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
@@ -92,15 +93,12 @@
 	private int primaryKeyFieldCount = 4;
 	private ITypeTrait[] primaryTypeTraits = new ITypeTrait[primaryFieldCount];
 	private IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
-	private IPrimitiveValueProviderFactory[] primaryValueProviderFactories = new IPrimitiveValueProviderFactory[primaryKeyFieldCount];
 
 	private RTreeTypeAwareTupleWriterFactory primaryTupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(
 			primaryTypeTraits);
 
-	private ITreeIndexFrameFactory primaryInteriorFrameFactory = new RTreeNSMInteriorFrameFactory(
-			primaryTupleWriterFactory, primaryKeyFieldCount);
-	private ITreeIndexFrameFactory primaryLeafFrameFactory = new RTreeNSMLeafFrameFactory(
-			primaryTupleWriterFactory, primaryKeyFieldCount);
+	private ITreeIndexFrameFactory primaryInteriorFrameFactory;
+	private ITreeIndexFrameFactory primaryLeafFrameFactory;
 
 	private static String primaryRTreeName = "primary"
 			+ simpleDateFormat.format(new Date());
@@ -163,10 +161,8 @@
 	private RTreeTypeAwareTupleWriterFactory secondaryTupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(
 			secondaryTypeTraits);
 
-	private ITreeIndexFrameFactory secondaryInteriorFrameFactory = new RTreeNSMInteriorFrameFactory(
-			secondaryTupleWriterFactory, secondaryKeyFieldCount);
-	private ITreeIndexFrameFactory secondaryLeafFrameFactory = new RTreeNSMLeafFrameFactory(
-			secondaryTupleWriterFactory, secondaryKeyFieldCount);
+	private ITreeIndexFrameFactory secondaryInteriorFrameFactory;
+	private ITreeIndexFrameFactory secondaryLeafFrameFactory;
 
 	private static String secondaryRTreeName = "secondary"
 			+ simpleDateFormat.format(new Date());
@@ -197,11 +193,15 @@
 		primaryComparatorFactories[1] = primaryComparatorFactories[0];
 		primaryComparatorFactories[2] = primaryComparatorFactories[0];
 		primaryComparatorFactories[3] = primaryComparatorFactories[0];
-		primaryValueProviderFactories[0] = DoublePrimitiveValueProviderFactory.INSTANCE;
-		primaryValueProviderFactories[1] = primaryValueProviderFactories[0];
-		primaryValueProviderFactories[2] = primaryValueProviderFactories[0];
-		primaryValueProviderFactories[3] = primaryValueProviderFactories[0];
 
+		IPrimitiveValueProviderFactory[] primaryValueProviderFactories = RTreeUtils
+				.comparatorFactoriesToPrimitiveValueProviderFactories(primaryComparatorFactories);
+		
+		primaryInteriorFrameFactory = new RTreeNSMInteriorFrameFactory(
+				primaryTupleWriterFactory, primaryValueProviderFactories);
+		primaryLeafFrameFactory = new RTreeNSMLeafFrameFactory(
+				primaryTupleWriterFactory, primaryValueProviderFactories);
+		
 		// field, type and key declarations for primary B-tree index
 		primaryBTreeTypeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
 		primaryBTreeTypeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
@@ -230,6 +230,14 @@
 		secondaryValueProviderFactories[2] = secondaryValueProviderFactories[0];
 		secondaryValueProviderFactories[3] = secondaryValueProviderFactories[0];
 
+		IPrimitiveValueProviderFactory[] secondaryValueProviderFactories = RTreeUtils
+				.comparatorFactoriesToPrimitiveValueProviderFactories(secondaryComparatorFactories);
+		
+		secondaryInteriorFrameFactory = new RTreeNSMInteriorFrameFactory(
+				secondaryTupleWriterFactory, secondaryValueProviderFactories);
+		secondaryLeafFrameFactory = new RTreeNSMLeafFrameFactory(
+				secondaryTupleWriterFactory, secondaryValueProviderFactories);
+		
 		loadPrimaryIndexTest();
 		loadPrimaryBTreeIndexTest();
 		loadSecondaryIndexTest();
@@ -292,7 +300,7 @@
 				spec, storageManager, treeIndexRegistryProvider,
 				primaryBTreeSplitProvider, primaryBTreeInteriorFrameFactory,
 				primaryBTreeLeafFrameFactory, primaryBTreeTypeTraits,
-				primaryBTreeComparatorFactories, null, fieldPermutation, 0.7f,
+				primaryBTreeComparatorFactories, fieldPermutation, 0.7f,
 				bTreeopHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				primaryBTreeBulkLoad, NC1_ID);
@@ -338,7 +346,7 @@
 				spec, storageManager, treeIndexRegistryProvider,
 				primaryRTreeSplitProvider, primaryInteriorFrameFactory,
 				primaryLeafFrameFactory, primaryTypeTraits,
-				primaryComparatorFactories, secondaryValueProviderFactories,
+				primaryComparatorFactories,
 				fieldPermutation, 0.7f, opHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				primaryRTreeBulkLoad, NC1_ID);
@@ -391,7 +399,7 @@
 				spec, storageManager, treeIndexRegistryProvider,
 				secondaryRTreeSplitProvider, secondaryInteriorFrameFactory,
 				secondaryLeafFrameFactory, secondaryTypeTraits,
-				secondaryComparatorFactories, secondaryValueProviderFactories,
+				secondaryComparatorFactories,
 				fieldPermutation, 0.7f, opHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				secondaryRTreeBulkLoad, NC1_ID);
@@ -443,7 +451,7 @@
 				treeIndexRegistryProvider, secondaryRTreeSplitProvider,
 				secondaryInteriorFrameFactory, secondaryLeafFrameFactory,
 				secondaryTypeTraits, secondaryComparatorFactories,
-				secondaryValueProviderFactories, keyFields, opHelperFactory);
+				keyFields, opHelperFactory);
 		PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
 				secondaryRTreeSearchOp, NC1_ID);
 
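// ---------------------------------------------------------------------------
// Editor's sketch (not part of the patch): in the RTree tests above, the
// hand-populated IPrimitiveValueProviderFactory arrays are replaced by
// RTreeUtils.comparatorFactoriesToPrimitiveValueProviderFactories(...), the
// frame factories now take that derived array instead of a key-field count,
// and the bulk-load/search descriptors no longer take the value providers at
// all. Minimal illustration under stated assumptions: the packages of
// IBinaryComparatorFactory, IPrimitiveValueProviderFactory and
// ITreeIndexFrameFactory are not visible in this diff and are guesses; the
// RTree classes are imported as in the hunks above.
import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriterFactory;
import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;

class RTreeFrameFactorySketch {
    // Builds interior/leaf frame factories from the key comparators alone;
    // the per-key value providers are derived rather than spelled out.
    static ITreeIndexFrameFactory[] frameFactories(RTreeTypeAwareTupleWriterFactory tupleWriterFactory,
            IBinaryComparatorFactory[] comparatorFactories) {
        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils
                .comparatorFactoriesToPrimitiveValueProviderFactories(comparatorFactories);
        ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(
                tupleWriterFactory, valueProviderFactories);
        ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(
                tupleWriterFactory, valueProviderFactories);
        return new ITreeIndexFrameFactory[] { interiorFrameFactory, leafFrameFactory };
    }
}
// ---------------------------------------------------------------------------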
diff --git a/hyracks-examples/text-example/.project b/hyracks-examples/text-example/.project
deleted file mode 100644
index 4e057cd..0000000
--- a/hyracks-examples/text-example/.project
+++ /dev/null
@@ -1,17 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>text-example</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-examples/text-example/.settings/org.maven.ide.eclipse.prefs b/hyracks-examples/text-example/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index 4562b1a..0000000
--- a/hyracks-examples/text-example/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Tue Sep 28 14:37:42 PDT 2010
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks-examples/text-example/textapp/.classpath b/hyracks-examples/text-example/textapp/.classpath
deleted file mode 100644
index f2cc5f7..0000000
--- a/hyracks-examples/text-example/textapp/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/test-classes" path="src/test/java"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-examples/text-example/textapp/.project b/hyracks-examples/text-example/textapp/.project
deleted file mode 100644
index 4f3af14..0000000
--- a/hyracks-examples/text-example/textapp/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>textapp</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-examples/text-example/textapp/.settings/org.eclipse.jdt.core.prefs b/hyracks-examples/text-example/textapp/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644
index 692202d..0000000
--- a/hyracks-examples/text-example/textapp/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,13 +0,0 @@
-#Thu May 19 22:55:12 PDT 2011
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
-org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
-org.eclipse.jdt.core.compiler.compliance=1.6
-org.eclipse.jdt.core.compiler.debug.lineNumber=generate
-org.eclipse.jdt.core.compiler.debug.localVariable=generate
-org.eclipse.jdt.core.compiler.debug.sourceFile=generate
-org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
-org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.6
diff --git a/hyracks-examples/text-example/textapp/.settings/org.maven.ide.eclipse.prefs b/hyracks-examples/text-example/textapp/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index 4562b1a..0000000
--- a/hyracks-examples/text-example/textapp/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Tue Sep 28 14:37:42 PDT 2010
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks-examples/text-example/textapp/pom.xml b/hyracks-examples/text-example/textapp/pom.xml
index 1e4e862..3b3b9fd 100644
--- a/hyracks-examples/text-example/textapp/pom.xml
+++ b/hyracks-examples/text-example/textapp/pom.xml
@@ -11,6 +11,35 @@
   </parent>
 
   <build>
+    <pluginManagement>
+      <plugins>
+        <plugin>
+          <groupId>org.eclipse.m2e</groupId>
+          <artifactId>lifecycle-mapping</artifactId>
+          <version>1.0.0</version>
+          <configuration>
+            <lifecycleMappingMetadata>
+              <pluginExecutions>
+                <pluginExecution>
+                  <pluginExecutionFilter>
+                    <groupId>org.apache.maven.plugins</groupId>
+                    <artifactId>maven-dependency-plugin</artifactId>
+                    <versionRange>[1.0.0,)</versionRange>
+                    <goals>
+                      <goal>copy-dependencies</goal>
+                    </goals>
+                  </pluginExecutionFilter>
+                  <action>
+                    <ignore />
+                  </action>
+                </pluginExecution>
+              </pluginExecutions>
+            </lifecycleMappingMetadata>
+          </configuration>
+        </plugin>
+      </plugins>
+    </pluginManagement>
+
     <plugins>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
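Note: the <pluginManagement> block added above (the same block is added to tpchapp/pom.xml further down) registers an org.eclipse.m2e lifecycle-mapping entry that tells Eclipse's m2e builder to ignore the maven-dependency-plugin copy-dependencies goal during workspace builds, which suppresses the "plugin execution not covered by lifecycle configuration" error in the IDE. Command-line Maven builds are unaffected, since the lifecycle-mapping entry is only interpreted by m2e.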
diff --git a/hyracks-examples/text-example/textclient/.classpath b/hyracks-examples/text-example/textclient/.classpath
deleted file mode 100644
index 1f3c1ff..0000000
--- a/hyracks-examples/text-example/textclient/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/classes" path="src/main/java"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-examples/text-example/textclient/.project b/hyracks-examples/text-example/textclient/.project
deleted file mode 100644
index 04307d3..0000000
--- a/hyracks-examples/text-example/textclient/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>textclient</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-examples/text-example/textclient/.settings/org.eclipse.jdt.core.prefs b/hyracks-examples/text-example/textclient/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644
index dfac000..0000000
--- a/hyracks-examples/text-example/textclient/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,6 +0,0 @@
-#Fri May 20 19:34:05 PDT 2011
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
-org.eclipse.jdt.core.compiler.compliance=1.6
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.6
diff --git a/hyracks-examples/text-example/textclient/.settings/org.maven.ide.eclipse.prefs b/hyracks-examples/text-example/textclient/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index 4562b1a..0000000
--- a/hyracks-examples/text-example/textclient/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Tue Sep 28 14:37:42 PDT 2010
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks-examples/text-example/texthelper/.classpath b/hyracks-examples/text-example/texthelper/.classpath
deleted file mode 100644
index 1f3c1ff..0000000
--- a/hyracks-examples/text-example/texthelper/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/classes" path="src/main/java"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-examples/text-example/texthelper/.project b/hyracks-examples/text-example/texthelper/.project
deleted file mode 100644
index 19ce234..0000000
--- a/hyracks-examples/text-example/texthelper/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>texthelper</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-examples/text-example/texthelper/.settings/org.eclipse.jdt.core.prefs b/hyracks-examples/text-example/texthelper/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644
index dfac000..0000000
--- a/hyracks-examples/text-example/texthelper/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,6 +0,0 @@
-#Fri May 20 19:34:05 PDT 2011
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
-org.eclipse.jdt.core.compiler.compliance=1.6
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.6
diff --git a/hyracks-examples/text-example/texthelper/.settings/org.maven.ide.eclipse.prefs b/hyracks-examples/text-example/texthelper/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index 4562b1a..0000000
--- a/hyracks-examples/text-example/texthelper/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Tue Sep 28 14:37:42 PDT 2010
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks-examples/tpch-example/.project b/hyracks-examples/tpch-example/.project
deleted file mode 100644
index dfa44a1..0000000
--- a/hyracks-examples/tpch-example/.project
+++ /dev/null
@@ -1,17 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>tpch-example</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-examples/tpch-example/.settings/org.maven.ide.eclipse.prefs b/hyracks-examples/tpch-example/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index e6f9a9e..0000000
--- a/hyracks-examples/tpch-example/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Sun Aug 29 19:38:10 PDT 2010
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks-examples/tpch-example/tpchapp/.classpath b/hyracks-examples/tpch-example/tpchapp/.classpath
deleted file mode 100644
index 3f62785..0000000
--- a/hyracks-examples/tpch-example/tpchapp/.classpath
+++ /dev/null
@@ -1,6 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/J2SE-1.4"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-examples/tpch-example/tpchapp/.project b/hyracks-examples/tpch-example/tpchapp/.project
deleted file mode 100644
index 46037da..0000000
--- a/hyracks-examples/tpch-example/tpchapp/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>tpchapp</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-examples/tpch-example/tpchapp/.settings/org.eclipse.jdt.core.prefs b/hyracks-examples/tpch-example/tpchapp/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644
index f362c73..0000000
--- a/hyracks-examples/tpch-example/tpchapp/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,6 +0,0 @@
-#Sun Aug 14 10:18:14 PDT 2011
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.4
-org.eclipse.jdt.core.compiler.compliance=1.4
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.4
diff --git a/hyracks-examples/tpch-example/tpchapp/.settings/org.maven.ide.eclipse.prefs b/hyracks-examples/tpch-example/tpchapp/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index e6f9a9e..0000000
--- a/hyracks-examples/tpch-example/tpchapp/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Sun Aug 29 19:38:10 PDT 2010
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks-examples/tpch-example/tpchapp/pom.xml b/hyracks-examples/tpch-example/tpchapp/pom.xml
index 18c8208..c65aaf5 100644
--- a/hyracks-examples/tpch-example/tpchapp/pom.xml
+++ b/hyracks-examples/tpch-example/tpchapp/pom.xml
@@ -11,6 +11,35 @@
   </parent>
 
   <build>
+    <pluginManagement>
+      <plugins>
+        <plugin>
+          <groupId>org.eclipse.m2e</groupId>
+          <artifactId>lifecycle-mapping</artifactId>
+          <version>1.0.0</version>
+          <configuration>
+            <lifecycleMappingMetadata>
+              <pluginExecutions>
+                <pluginExecution>
+                  <pluginExecutionFilter>
+                    <groupId>org.apache.maven.plugins</groupId>
+                    <artifactId>maven-dependency-plugin</artifactId>
+                    <versionRange>[1.0.0,)</versionRange>
+                    <goals>
+                      <goal>copy-dependencies</goal>
+                    </goals>
+                  </pluginExecutionFilter>
+                  <action>
+                    <ignore />
+                  </action>
+                </pluginExecution>
+              </pluginExecutions>
+            </lifecycleMappingMetadata>
+          </configuration>
+        </plugin>
+      </plugins>
+    </pluginManagement>
+
     <plugins>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
diff --git a/hyracks-examples/tpch-example/tpchclient/.classpath b/hyracks-examples/tpch-example/tpchclient/.classpath
deleted file mode 100644
index 1f3c1ff..0000000
--- a/hyracks-examples/tpch-example/tpchclient/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/classes" path="src/main/java"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-examples/tpch-example/tpchclient/.project b/hyracks-examples/tpch-example/tpchclient/.project
deleted file mode 100644
index b0effc6..0000000
--- a/hyracks-examples/tpch-example/tpchclient/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>tpchclient</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-examples/tpch-example/tpchclient/.settings/org.eclipse.jdt.core.prefs b/hyracks-examples/tpch-example/tpchclient/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644
index 375e12e..0000000
--- a/hyracks-examples/tpch-example/tpchclient/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,6 +0,0 @@
-#Fri May 20 19:34:07 PDT 2011
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
-org.eclipse.jdt.core.compiler.compliance=1.6
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.6
diff --git a/hyracks-examples/tpch-example/tpchclient/.settings/org.maven.ide.eclipse.prefs b/hyracks-examples/tpch-example/tpchclient/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index 1b13d8b..0000000
--- a/hyracks-examples/tpch-example/tpchclient/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Wed Aug 11 19:09:15 PDT 2010
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks-hadoop-compat/.classpath b/hyracks-hadoop-compat/.classpath
deleted file mode 100644
index 1f3c1ff..0000000
--- a/hyracks-hadoop-compat/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/classes" path="src/main/java"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-hadoop-compat/.project b/hyracks-hadoop-compat/.project
deleted file mode 100644
index 7d50383..0000000
--- a/hyracks-hadoop-compat/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>hyracks-hadoop-compat</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-hadoop-compat/.settings/org.eclipse.jdt.core.prefs b/hyracks-hadoop-compat/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644
index dfac000..0000000
--- a/hyracks-hadoop-compat/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,6 +0,0 @@
-#Fri May 20 19:34:05 PDT 2011
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
-org.eclipse.jdt.core.compiler.compliance=1.6
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.6
diff --git a/hyracks-hadoop-compat/.settings/org.maven.ide.eclipse.prefs b/hyracks-hadoop-compat/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index e03a9fc..0000000
--- a/hyracks-hadoop-compat/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Tue Oct 19 11:05:30 PDT 2010
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks-server/.classpath b/hyracks-server/.classpath
deleted file mode 100644
index 1f3c1ff..0000000
--- a/hyracks-server/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/classes" path="src/main/java"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-server/.project b/hyracks-server/.project
deleted file mode 100644
index 8b4eef9..0000000
--- a/hyracks-server/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>hyracks-server</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-storage-am-btree/.classpath b/hyracks-storage-am-btree/.classpath
deleted file mode 100644
index 1f3c1ff..0000000
--- a/hyracks-storage-am-btree/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/classes" path="src/main/java"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-storage-am-btree/.project b/hyracks-storage-am-btree/.project
deleted file mode 100644
index 754745f..0000000
--- a/hyracks-storage-am-btree/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>hyracks-storage-am-btree</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-storage-am-btree/.settings/org.eclipse.jdt.core.prefs b/hyracks-storage-am-btree/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644
index 451c926..0000000
--- a/hyracks-storage-am-btree/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,264 +0,0 @@
-#Fri May 20 19:34:05 PDT 2011
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
-org.eclipse.jdt.core.compiler.compliance=1.6
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.6
-org.eclipse.jdt.core.formatter.align_type_members_on_columns=false
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression=16
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant=48
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call=16
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation=16
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression=16
-org.eclipse.jdt.core.formatter.alignment_for_assignment=0
-org.eclipse.jdt.core.formatter.alignment_for_binary_expression=16
-org.eclipse.jdt.core.formatter.alignment_for_compact_if=16
-org.eclipse.jdt.core.formatter.alignment_for_conditional_expression=80
-org.eclipse.jdt.core.formatter.alignment_for_enum_constants=48
-org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer=16
-org.eclipse.jdt.core.formatter.alignment_for_multiple_fields=16
-org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation=16
-org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration=16
-org.eclipse.jdt.core.formatter.blank_lines_after_imports=1
-org.eclipse.jdt.core.formatter.blank_lines_after_package=1
-org.eclipse.jdt.core.formatter.blank_lines_before_field=0
-org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration=0
-org.eclipse.jdt.core.formatter.blank_lines_before_imports=1
-org.eclipse.jdt.core.formatter.blank_lines_before_member_type=1
-org.eclipse.jdt.core.formatter.blank_lines_before_method=1
-org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk=1
-org.eclipse.jdt.core.formatter.blank_lines_before_package=0
-org.eclipse.jdt.core.formatter.blank_lines_between_import_groups=1
-org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations=1
-org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_array_initializer=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_block=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_block_in_case=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_enum_constant=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_method_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_switch=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_type_declaration=end_of_line
-org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment=false
-org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment=false
-org.eclipse.jdt.core.formatter.comment.format_block_comments=true
-org.eclipse.jdt.core.formatter.comment.format_header=false
-org.eclipse.jdt.core.formatter.comment.format_html=true
-org.eclipse.jdt.core.formatter.comment.format_javadoc_comments=true
-org.eclipse.jdt.core.formatter.comment.format_line_comments=true
-org.eclipse.jdt.core.formatter.comment.format_source_code=true
-org.eclipse.jdt.core.formatter.comment.indent_parameter_description=true
-org.eclipse.jdt.core.formatter.comment.indent_root_tags=true
-org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags=insert
-org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter=insert
-org.eclipse.jdt.core.formatter.comment.line_length=80
-org.eclipse.jdt.core.formatter.compact_else_if=true
-org.eclipse.jdt.core.formatter.continuation_indentation=2
-org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer=2
-org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line=false
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header=true
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header=true
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header=true
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header=true
-org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases=true
-org.eclipse.jdt.core.formatter.indent_empty_lines=false
-org.eclipse.jdt.core.formatter.indent_statements_compare_to_block=true
-org.eclipse.jdt.core.formatter.indent_statements_compare_to_body=true
-org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases=true
-org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch=true
-org.eclipse.jdt.core.formatter.indentation.size=4
-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable=insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_member=insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter=insert
-org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_binary_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_ellipsis=insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_after_unary_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter=insert
-org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_binary_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert=insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_ellipsis=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while=insert
-org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return=insert
-org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw=insert
-org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_semicolon=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_unary_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.join_lines_in_comments=true
-org.eclipse.jdt.core.formatter.join_wrapped_lines=true
-org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line=false
-org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line=false
-org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line=false
-org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line=false
-org.eclipse.jdt.core.formatter.lineSplit=120
-org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column=false
-org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column=false
-org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body=0
-org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve=1
-org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line=true
-org.eclipse.jdt.core.formatter.tabulation.char=space
-org.eclipse.jdt.core.formatter.tabulation.size=4
-org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations=false
-org.eclipse.jdt.core.formatter.wrap_before_binary_operator=true
diff --git a/hyracks-storage-am-btree/.settings/org.maven.ide.eclipse.prefs b/hyracks-storage-am-btree/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index ecf0da7..0000000
--- a/hyracks-storage-am-btree/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Sun Aug 29 21:59:34 PDT 2010
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeFrame.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeFrame.java
new file mode 100644
index 0000000..a24c4d7
--- /dev/null
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeFrame.java
@@ -0,0 +1,16 @@
+package edu.uci.ics.hyracks.storage.am.btree.api;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+
+public interface IBTreeFrame extends ITreeIndexFrame {
+    public int findUpdateTupleIndex(ITupleReference tuple) throws TreeIndexException;
+    public int findInsertTupleIndex(ITupleReference tuple) throws TreeIndexException;
+    public int findDeleteTupleIndex(ITupleReference tuple) throws TreeIndexException;
+    public void insertSorted(ITupleReference tuple);
+    public boolean getSmFlag();
+    public void setSmFlag(boolean smFlag);
+    public void setMultiComparator(MultiComparator cmp);
+}
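The new IBTreeFrame interface splits locating a tuple's slot (findInsertTupleIndex / findUpdateTupleIndex / findDeleteTupleIndex, which may throw TreeIndexException, e.g. on a duplicate key) from the mutation itself. A minimal usage sketch, assuming the inherited ITreeIndexFrame contract exposes an insert(tuple, tupleIndex) method and that the comparator and tuple are prepared elsewhere:

    // Sketch only; insert(tuple, tupleIndex) is assumed from the common ITreeIndexFrame API.
    static void insertIntoLeaf(IBTreeFrame leafFrame, MultiComparator cmp, ITupleReference tuple)
            throws TreeIndexException {
        leafFrame.setMultiComparator(cmp);                       // comparator is frame state now
        int tupleIndex = leafFrame.findInsertTupleIndex(tuple);  // validates, e.g. rejects duplicate keys
        leafFrame.insert(tuple, tupleIndex);                     // assumed inherited mutation method
    }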
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeInteriorFrame.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeInteriorFrame.java
index 25ab167..23fdcf5 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeInteriorFrame.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeInteriorFrame.java
@@ -15,22 +15,16 @@
 
 package edu.uci.ics.hyracks.storage.am.btree.api;
 
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
 import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
 
-public interface IBTreeInteriorFrame extends ITreeIndexFrame {
-    public void insertSorted(ITupleReference tuple, MultiComparator cmp) throws HyracksDataException;
-    
-    public int getChildPageId(RangePredicate pred, MultiComparator srcCmp);
+public interface IBTreeInteriorFrame extends IBTreeFrame {
+    public int getChildPageId(RangePredicate pred);
 
-    public int getLeftmostChildPageId(MultiComparator cmp);
+    public int getLeftmostChildPageId();
 
-    public int getRightmostChildPageId(MultiComparator cmp);
+    public int getRightmostChildPageId();
 
     public void setRightmostChildPageId(int pageId);
 
-    public void deleteGreatest(MultiComparator cmp);
+    public void deleteGreatest();
 }
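With this change the interior frame no longer takes a MultiComparator on every call; the comparator is installed once through setMultiComparator, inherited from IBTreeFrame. A sketch of the resulting call pattern (imports from the storage.am packages are assumed):

    // Sketch: the comparator becomes frame state instead of a per-call argument.
    static int findChild(IBTreeInteriorFrame interiorFrame, MultiComparator cmp, RangePredicate pred) {
        interiorFrame.setMultiComparator(cmp);
        return interiorFrame.getChildPageId(pred);
    }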
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeLeafFrame.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeLeafFrame.java
index 53d892e..a079527 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeLeafFrame.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeLeafFrame.java
@@ -17,25 +17,20 @@
 
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
 
-public interface IBTreeLeafFrame extends ITreeIndexFrame {
-    public void insertSorted(ITupleReference tuple, MultiComparator cmp) throws HyracksDataException;
-    
+public interface IBTreeLeafFrame extends IBTreeFrame {
     public void setNextLeaf(int nextPage);
 
     public int getNextLeaf();
 
     public void setPrevLeaf(int prevPage);
 
-    public int getPrevLeaf();
-
-    public ITreeIndexTupleReference createTupleReference();
+    public int getPrevLeaf();
 
     public int findTupleIndex(ITupleReference searchKey, ITreeIndexTupleReference pageTuple, MultiComparator cmp,
-            FindTupleMode ftm, FindTupleNoExactMatchPolicy ftp);
+            FindTupleMode ftm, FindTupleNoExactMatchPolicy ftp) throws HyracksDataException;
 }
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IPrefixSlotManager.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IPrefixSlotManager.java
index d01db11..0636968 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IPrefixSlotManager.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IPrefixSlotManager.java
@@ -16,7 +16,7 @@
 package edu.uci.ics.hyracks.storage.am.btree.api;
 
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ISlotManager;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
@@ -42,9 +42,9 @@
 // potentially all tuples slots would have to change their prefix slot pointers
 // all prefixes are recomputed during a reorg or compaction
 
-public interface IPrefixSlotManager {
-    public void setFrame(BTreeFieldPrefixNSMLeafFrame frame);
-
+public interface IPrefixSlotManager extends ISlotManager {
+    // TODO: Clean up interface after extending ISlotManager.
+
     public int decodeFirstSlotField(int slot);
 
     public int decodeSecondSlotField(int slot);
@@ -58,7 +58,7 @@
     public int insertSlot(int slot, int tupleOff);
 
     // returns prefix slot number, returns TUPLE_UNCOMPRESSED if none found
-    public int findPrefix(ITupleReference tuple, ITreeIndexTupleReference framePrefixTuple, MultiComparator multiCmp);
+    public int findPrefix(ITupleReference tuple, ITreeIndexTupleReference framePrefixTuple);
 
     public int getTupleSlotStartOff();
 
@@ -78,4 +78,6 @@
 
     // functions for testing
     public void setPrefixSlot(int tupleIndex, int slot);
+
+    public void setMultiComparator(MultiComparator cmp);
 }
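For illustration, the encode/decode trio above packs two fields into one int slot. The actual field widths live in FieldPrefixSlotManager and are not part of this diff; the sketch below assumes, purely for illustration, one byte for the prefix-slot field and three bytes for the tuple offset:

    // Hypothetical packing; the real widths are defined in FieldPrefixSlotManager.
    static int encodeSlotFields(int prefixSlotNum, int tupleOff) {
        return (prefixSlotNum << 24) | (tupleOff & 0x00FFFFFF);
    }
    static int decodeFirstSlotField(int slot) {
        return slot >>> 24;           // prefix slot number, or a TUPLE_UNCOMPRESSED marker
    }
    static int decodeSecondSlotField(int slot) {
        return slot & 0x00FFFFFF;     // tuple offset within the page
    }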
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/compressors/FieldPrefixCompressor.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/compressors/FieldPrefixCompressor.java
index 1292ff2..3f7bc30 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/compressors/FieldPrefixCompressor.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/compressors/FieldPrefixCompressor.java
@@ -22,15 +22,16 @@
 
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
 import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
-import edu.uci.ics.hyracks.storage.am.btree.api.IFrameCompressor;
 import edu.uci.ics.hyracks.storage.am.btree.api.IPrefixSlotManager;
 import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame;
 import edu.uci.ics.hyracks.storage.am.btree.impls.FieldPrefixSlotManager;
 import edu.uci.ics.hyracks.storage.am.btree.impls.FieldPrefixTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameCompressor;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
 import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriter;
 
-public class FieldPrefixCompressor implements IFrameCompressor {
+public class FieldPrefixCompressor implements ITreeIndexFrameCompressor {
 
     // minimum ratio of uncompressed tuples to total tuple to consider
     // re-compression
@@ -49,8 +50,9 @@
     }
 
     @Override
-    public boolean compress(BTreeFieldPrefixNSMLeafFrame frame, MultiComparator cmp) throws Exception {
-        int tupleCount = frame.getTupleCount();
+    public boolean compress(ITreeIndexFrame indexFrame, MultiComparator cmp) throws Exception {
+        BTreeFieldPrefixNSMLeafFrame frame = (BTreeFieldPrefixNSMLeafFrame) indexFrame;
+        int tupleCount = frame.getTupleCount();
         if (tupleCount <= 0) {
             frame.setPrefixTupleCount(0);
             frame.setFreeSpaceOff(frame.getOrigFreeSpaceOff());
@@ -58,13 +60,17 @@
             return false;
         }
 
+        if (cmp.getKeyFieldCount() == 1) {
+            return false;
+        }
+
         int uncompressedTupleCount = frame.getUncompressedTupleCount();
         float ratio = (float) uncompressedTupleCount / (float) tupleCount;
         if (ratio < ratioThreshold)
             return false;
 
         IBinaryComparator[] cmps = cmp.getComparators();
-        int fieldCount = cmp.getKeyFieldCount();
+        int fieldCount = typeTraits.length;
 
         ByteBuffer buf = frame.getBuffer();
         byte[] pageArray = buf.array();
@@ -102,7 +108,7 @@
 
         // we use a greedy heuristic to solve this "knapsack"-like problem
         // (every keyPartition has a space savings and a number of slots
-        // required, but we the number of slots are constrained by
+        // required, but the number of slots is constrained by
         // MAX_PREFIX_SLOTS)
         // we sort the keyPartitions by maxBenefitMinusCost / prefixSlotsNeeded
         // and later choose the top MAX_PREFIX_SLOTS
@@ -161,7 +167,6 @@
         uncompressedTupleCount = 0;
 
         TypeAwareTupleWriter tupleWriter = new TypeAwareTupleWriter(typeTraits);
-
         FieldPrefixTupleReference tupleToWrite = new FieldPrefixTupleReference(tupleWriter.createTupleReference());
         tupleToWrite.setFieldCount(fieldCount);
 
@@ -312,8 +317,7 @@
 
         // copy new tuple and new slots into original page
         int freeSpaceAfterInit = frame.getOrigFreeSpaceOff();
-        System
-                .arraycopy(buffer, freeSpaceAfterInit, pageArray, freeSpaceAfterInit, tupleFreeSpace
+        System.arraycopy(buffer, freeSpaceAfterInit, pageArray, freeSpaceAfterInit, tupleFreeSpace
                         - freeSpaceAfterInit);
 
         // copy prefix slots
@@ -368,7 +372,7 @@
     private ArrayList<KeyPartition> getKeyPartitions(BTreeFieldPrefixNSMLeafFrame frame, MultiComparator cmp,
             int occurrenceThreshold) {
         IBinaryComparator[] cmps = cmp.getComparators();
-        int fieldCount = cmp.getKeyFieldCount();
+        int fieldCount = typeTraits.length;
 
         int maxCmps = cmps.length - 1;
         ByteBuffer buf = frame.getBuffer();
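The greedy heuristic described in the comment above sorts key partitions by maxBenefitMinusCost / prefixSlotsNeeded and keeps only as many as MAX_PREFIX_SLOTS allows. A sketch of that selection, with the KeyPartition field names taken from the comment and everything else illustrative:

    // Sketch of the greedy "knapsack" selection (Java 6 style to match the codebase).
    static int selectKeyPartitions(ArrayList<KeyPartition> keyPartitions, int maxPrefixSlots) {
        Collections.sort(keyPartitions, new Comparator<KeyPartition>() {
            public int compare(KeyPartition a, KeyPartition b) {
                double ra = (double) a.maxBenefitMinusCost / a.prefixSlotsNeeded;
                double rb = (double) b.maxBenefitMinusCost / b.prefixSlotsNeeded;
                return Double.compare(rb, ra);   // descending benefit/cost ratio
            }
        });
        int slotsUsed = 0;
        for (KeyPartition kp : keyPartitions) {
            if (slotsUsed + kp.prefixSlotsNeeded > maxPrefixSlots) {
                continue;                        // this partition no longer fits in the slot budget
            }
            slotsUsed += kp.prefixSlotsNeeded;
            // ...compress the tuples covered by this partition...
        }
        return slotsUsed;
    }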
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeOpHelper.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeOpHelper.java
index eaf5a81..09a809b 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeOpHelper.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeOpHelper.java
@@ -1,7 +1,6 @@
 package edu.uci.ics.hyracks.storage.am.btree.dataflow;
 
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
 import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
@@ -12,7 +11,6 @@
 import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexOpHelper;
 import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
 import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
 
 public class BTreeOpHelper extends TreeIndexOpHelper {
@@ -30,11 +28,7 @@
         ITreeIndexMetaDataFrameFactory metaDataFrameFactory = new LIFOMetaDataFrameFactory();
         IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, indexFileId, 0,
                 metaDataFrameFactory);
-        return new BTree(bufferCache, freePageManager, opDesc.getTreeIndexInteriorFactory(),
-                opDesc.getTreeIndexLeafFactory(), cmp);
-    }
-
-    public MultiComparator createMultiComparator(IBinaryComparator[] comparators) throws HyracksDataException {
-        return new MultiComparator(opDesc.getTreeIndexTypeTraits(), comparators);
+        return new BTree(bufferCache, opDesc.getTreeIndexFieldCount(), cmp, freePageManager, opDesc.getTreeIndexInteriorFactory(),
+                opDesc.getTreeIndexLeafFactory());
     }
 }
\ No newline at end of file
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorDescriptor.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorDescriptor.java
index c542bb0..b381b88 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorDescriptor.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorDescriptor.java
@@ -49,7 +49,7 @@
             IBinaryComparatorFactory[] comparatorFactories, boolean isForward, int[] lowKeyFields, int[] highKeyFields,
             boolean lowKeyInclusive, boolean highKeyInclusive, ITreeIndexOpHelperFactory opHelperFactory) {
         super(spec, 1, 1, recDesc, storageManager, treeIndexRegistryProvider, fileSplitProvider, interiorFrameFactory,
-                leafFrameFactory, typeTraits, comparatorFactories, null, opHelperFactory);
+                leafFrameFactory, typeTraits, comparatorFactories, opHelperFactory);
         this.isForward = isForward;
         this.lowKeyFields = lowKeyFields;
         this.highKeyFields = highKeyFields;
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorNodePushable.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorNodePushable.java
index f53bfd6..8cfd5d6 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorNodePushable.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorNodePushable.java
@@ -101,6 +101,7 @@
 
             // construct range predicate
 
+            // TODO: Can we construct the multicmps using helper methods?
             int lowKeySearchFields = btree.getMultiComparator().getComparators().length;
             int highKeySearchFields = btree.getMultiComparator().getComparators().length;
             if (lowKey != null)
@@ -112,7 +113,7 @@
             for (int i = 0; i < lowKeySearchFields; i++) {
                 lowKeySearchComparators[i] = btree.getMultiComparator().getComparators()[i];
             }
-            lowKeySearchCmp = new MultiComparator(btree.getMultiComparator().getTypeTraits(), lowKeySearchComparators);
+            lowKeySearchCmp = new MultiComparator(lowKeySearchComparators);
 
             if (lowKeySearchFields == highKeySearchFields) {
                 highKeySearchCmp = lowKeySearchCmp;
@@ -121,8 +122,7 @@
                 for (int i = 0; i < highKeySearchFields; i++) {
                     highKeySearchComparators[i] = btree.getMultiComparator().getComparators()[i];
                 }
-                highKeySearchCmp = new MultiComparator(btree.getMultiComparator().getTypeTraits(),
-                        highKeySearchComparators);
+                highKeySearchCmp = new MultiComparator(highKeySearchComparators);
 
             }
 
@@ -130,13 +130,12 @@
                     highKeySearchCmp);
 
             writeBuffer = treeIndexOpHelper.getHyracksTaskContext().allocateFrame();
-            tb = new ArrayTupleBuilder(btree.getMultiComparator().getFieldCount());
+            tb = new ArrayTupleBuilder(btree.getFieldCount());
             dos = tb.getDataOutput();
             appender = new FrameTupleAppender(treeIndexOpHelper.getHyracksTaskContext().getFrameSize());
             appender.reset(writeBuffer, true);
 
-            opCtx = btree.createOpContext(IndexOp.SEARCH, treeIndexOpHelper.getLeafFrame(),
-                    treeIndexOpHelper.getInteriorFrame(), null);
+            opCtx = btree.createOpContext(IndexOp.SEARCH);
 
         } catch (Exception e) {
             treeIndexOpHelper.deinit();
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IFrameCompressor.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeDuplicateKeyException.java
similarity index 64%
copy from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IFrameCompressor.java
copy to hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeDuplicateKeyException.java
index 8e1d0a2..d6d945f 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IFrameCompressor.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeDuplicateKeyException.java
@@ -13,11 +13,16 @@
  * limitations under the License.
  */
 
-package edu.uci.ics.hyracks.storage.am.btree.api;
+package edu.uci.ics.hyracks.storage.am.btree.exceptions;
 
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-
-public interface IFrameCompressor {
-    public boolean compress(BTreeFieldPrefixNSMLeafFrame frame, MultiComparator cmp) throws Exception;
+public class BTreeDuplicateKeyException extends BTreeException {
+    private static final long serialVersionUID = 1L;
+    
+    public BTreeDuplicateKeyException(Exception e) {
+        super(e);
+    }
+    
+    public BTreeDuplicateKeyException(String message) {
+        super(message);
+    }
 }
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeException.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeException.java
similarity index 87%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeException.java
rename to hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeException.java
index 8019bcb..1e09658 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeException.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeException.java
@@ -13,14 +13,14 @@
  * limitations under the License.
  */
 
-package edu.uci.ics.hyracks.storage.am.btree.impls;
+package edu.uci.ics.hyracks.storage.am.btree.exceptions;
 
 import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
 
 public class BTreeException extends TreeIndexException {
 
-    private static final long serialVersionUID = 1L;
-    private boolean handled = false;
+    protected static final long serialVersionUID = 1L;
+    protected boolean handled = false;
 
     public BTreeException(Exception e) {
         super(e);
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IFrameCompressor.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeNonExistentKeyException.java
similarity index 63%
copy from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IFrameCompressor.java
copy to hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeNonExistentKeyException.java
index 8e1d0a2..81a0e79 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IFrameCompressor.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeNonExistentKeyException.java
@@ -13,11 +13,17 @@
  * limitations under the License.
  */
 
-package edu.uci.ics.hyracks.storage.am.btree.api;
+package edu.uci.ics.hyracks.storage.am.btree.exceptions;
 
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-
-public interface IFrameCompressor {
-    public boolean compress(BTreeFieldPrefixNSMLeafFrame frame, MultiComparator cmp) throws Exception;
+public class BTreeNonExistentKeyException extends BTreeException {
+    
+    private static final long serialVersionUID = 1L;
+    
+    public BTreeNonExistentKeyException(Exception e) {
+        super(e);
+    }
+    
+    public BTreeNonExistentKeyException(String message) {
+        super(message);
+    }
 }
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IFrameCompressor.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeNotUpdateableException.java
similarity index 64%
copy from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IFrameCompressor.java
copy to hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeNotUpdateableException.java
index 8e1d0a2..73b22d8 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IFrameCompressor.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/exceptions/BTreeNotUpdateableException.java
@@ -13,11 +13,16 @@
  * limitations under the License.
  */
 
-package edu.uci.ics.hyracks.storage.am.btree.api;
+package edu.uci.ics.hyracks.storage.am.btree.exceptions;
 
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-
-public interface IFrameCompressor {
-    public boolean compress(BTreeFieldPrefixNSMLeafFrame frame, MultiComparator cmp) throws Exception;
+public class BTreeNotUpdateableException extends BTreeException {
+    private static final long serialVersionUID = 1L;
+    
+    public BTreeNotUpdateableException(Exception e) {
+        super(e);
+    }
+    
+    public BTreeNotUpdateableException(String message) {
+        super(message);
+    }
 }
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrame.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrame.java
index bd4947c..0334a7f 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrame.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrame.java
@@ -15,30 +15,28 @@
 
 package edu.uci.ics.hyracks.storage.am.btree.frames;
 
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Collections;
 
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
 import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IFrameCompressor;
 import edu.uci.ics.hyracks.storage.am.btree.api.IPrefixSlotManager;
 import edu.uci.ics.hyracks.storage.am.btree.compressors.FieldPrefixCompressor;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeException;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeDuplicateKeyException;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeNonExistentKeyException;
 import edu.uci.ics.hyracks.storage.am.btree.impls.FieldPrefixPrefixTupleReference;
 import edu.uci.ics.hyracks.storage.am.btree.impls.FieldPrefixSlotManager;
 import edu.uci.ics.hyracks.storage.am.btree.impls.FieldPrefixTupleReference;
 import edu.uci.ics.hyracks.storage.am.common.api.ISlotManager;
 import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameCompressor;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
 import edu.uci.ics.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
@@ -52,27 +50,29 @@
 public class BTreeFieldPrefixNSMLeafFrame implements IBTreeLeafFrame {
 
     protected static final int pageLsnOff = 0; // 0
-    protected static final int tupleCountOff = pageLsnOff + 4; // 4
-    protected static final int freeSpaceOff = tupleCountOff + 4; // 8
-    protected static final int totalFreeSpaceOff = freeSpaceOff + 4; // 12
-    protected static final int levelOff = totalFreeSpaceOff + 4; // 16
-    protected static final int smFlagOff = levelOff + 1; // 17
-    protected static final int uncompressedTupleCountOff = smFlagOff + 1; // 18
-    protected static final int prefixTupleCountOff = uncompressedTupleCountOff + 4; // 21
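+    // The page LSN is now stored as an 8-byte long, shifting all subsequent header offsets by 4 bytes.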
+    protected static final int tupleCountOff = pageLsnOff + 8; // 8
+    protected static final int freeSpaceOff = tupleCountOff + 4; // 12
+    protected static final int totalFreeSpaceOff = freeSpaceOff + 4; // 16
+    protected static final int levelOff = totalFreeSpaceOff + 4; // 20
+    protected static final int smFlagOff = levelOff + 1; // 21
+    protected static final int uncompressedTupleCountOff = smFlagOff + 1; // 22
+    protected static final int prefixTupleCountOff = uncompressedTupleCountOff + 4; // 26
 
-    protected static final int prevLeafOff = prefixTupleCountOff + 4; // 22
-    protected static final int nextLeafOff = prevLeafOff + 4; // 26
+    protected static final int prevLeafOff = prefixTupleCountOff + 4; // 30
+    protected static final int nextLeafOff = prevLeafOff + 4; // 34
 
     protected ICachedPage page = null;
     protected ByteBuffer buf = null;
-    public IFrameCompressor compressor;
-    public IPrefixSlotManager slotManager; // TODO: should be protected, but
-    // will trigger some refactoring
 
-    private ITreeIndexTupleWriter tupleWriter;
+    public final ITreeIndexFrameCompressor compressor;
+    // TODO: Should be protected, but will trigger some refactoring.
+    public final IPrefixSlotManager slotManager;
 
-    private FieldPrefixTupleReference frameTuple;
-    private FieldPrefixPrefixTupleReference framePrefixTuple;
+    private final ITreeIndexTupleWriter tupleWriter;
+    private MultiComparator cmp;
+    
+    private final FieldPrefixTupleReference frameTuple;
+    private final FieldPrefixPrefixTupleReference framePrefixTuple;
 
     public BTreeFieldPrefixNSMLeafFrame(ITreeIndexTupleWriter tupleWriter) {
         this.tupleWriter = tupleWriter;
@@ -101,7 +101,7 @@
     }
 
     @Override
-    public boolean compress(MultiComparator cmp) throws HyracksDataException {
+    public boolean compress() throws HyracksDataException {
         try {
             return compressor.compress(this, cmp);
         } catch (Exception e) {
@@ -115,11 +115,9 @@
     // 3. prefix tuple are sorted (last prefix tuple is at highest offset)
     // this procedure will not move prefix tuples
     @Override
-    public boolean compact(MultiComparator cmp) {
+    public boolean compact() {
         resetSpaceParams();
 
-        frameTuple.setFieldCount(cmp.getFieldCount());
-
         int tupleCount = buf.getInt(tupleCountOff);
 
         // determine start of target free space (depends on assumptions stated
@@ -168,82 +166,63 @@
             slotManager.setSlot(sortedTupleOffs.get(i).slotOff, slotManager.encodeSlotFields(prefixSlotNum, freeSpace));
             freeSpace += tupleLength;
         }
-        
+
         buf.putInt(freeSpaceOff, freeSpace);
         int totalFreeSpace = buf.capacity() - buf.getInt(freeSpaceOff)
                 - ((buf.getInt(tupleCountOff) + buf.getInt(prefixTupleCountOff)) * slotManager.getSlotSize());
         buf.putInt(totalFreeSpaceOff, totalFreeSpace);
-        
+
         return false;
     }
 
     @Override
-    public void delete(ITupleReference tuple, MultiComparator cmp, boolean exactDelete) throws Exception {
-        int slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.FTM_EXACT,
-                FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
+    public void delete(ITupleReference tuple, int slot) {
         int tupleIndex = slotManager.decodeSecondSlotField(slot);
-        if (tupleIndex == FieldPrefixSlotManager.GREATEST_SLOT) {
-            throw new BTreeException("Key to be deleted does not exist.");
+        int prefixSlotNum = slotManager.decodeFirstSlotField(slot);
+        int tupleSlotOff = slotManager.getTupleSlotOff(tupleIndex);
+
+        // perform deletion (we just do a memcpy to overwrite the slot)
+        int slotEndOff = slotManager.getTupleSlotEndOff();
+        int length = tupleSlotOff - slotEndOff;
+        System.arraycopy(buf.array(), slotEndOff, buf.array(), slotEndOff + slotManager.getSlotSize(), length);
+
+        // maintain space information, get size of tuple suffix (suffix
+        // could be entire tuple)
+        int tupleSize = 0;
+        int suffixFieldStart = 0;
+        if (prefixSlotNum == FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
+            suffixFieldStart = 0;
+            buf.putInt(uncompressedTupleCountOff, buf.getInt(uncompressedTupleCountOff) - 1);
         } else {
-            int prefixSlotNum = slotManager.decodeFirstSlotField(slot);
-            int tupleSlotOff = slotManager.getTupleSlotOff(tupleIndex);
-
-            if (exactDelete) {
-                frameTuple.setFieldCount(cmp.getFieldCount());
-                frameTuple.resetByTupleIndex(this, tupleIndex);
-
-                int comparison = cmp.fieldRangeCompare(tuple, frameTuple, cmp.getKeyFieldCount() - 1, cmp
-                        .getFieldCount()
-                        - cmp.getKeyFieldCount());
-                if (comparison != 0) {
-                    throw new BTreeException("Cannot delete tuple. Byte-by-byte comparison failed to prove equality.");
-                }
-            }
-
-            // perform deletion (we just do a memcpy to overwrite the slot)
-            int slotEndOff = slotManager.getTupleSlotEndOff();
-            int length = tupleSlotOff - slotEndOff;
-            System.arraycopy(buf.array(), slotEndOff, buf.array(), slotEndOff + slotManager.getSlotSize(), length);
-
-            // maintain space information, get size of tuple suffix (suffix
-            // could be entire tuple)
-            int tupleSize = 0;
-            int suffixFieldStart = 0;
-            if (prefixSlotNum == FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
-                suffixFieldStart = 0;
-                buf.putInt(uncompressedTupleCountOff, buf.getInt(uncompressedTupleCountOff) - 1);
-            } else {
-                int prefixSlot = buf.getInt(slotManager.getPrefixSlotOff(prefixSlotNum));
-                suffixFieldStart = slotManager.decodeFirstSlotField(prefixSlot);
-            }
-
-            frameTuple.resetByTupleIndex(this, tupleIndex);
-            tupleSize = tupleWriter.bytesRequired(frameTuple, suffixFieldStart, frameTuple.getFieldCount()
-                    - suffixFieldStart);
-
-            buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
-            buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + tupleSize + slotManager.getSlotSize());
+            int prefixSlot = buf.getInt(slotManager.getPrefixSlotOff(prefixSlotNum));
+            suffixFieldStart = slotManager.decodeFirstSlotField(prefixSlot);
         }
+
+        frameTuple.resetByTupleIndex(this, tupleIndex);
+        tupleSize = tupleWriter.bytesRequired(frameTuple, suffixFieldStart, frameTuple.getFieldCount()
+                - suffixFieldStart);
+
+        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
+        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + tupleSize + slotManager.getSlotSize());
     }
 
     @Override
-    public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple, MultiComparator cmp) {
+    public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple) {
         int freeContiguous = buf.capacity() - buf.getInt(freeSpaceOff)
                 - ((buf.getInt(tupleCountOff) + buf.getInt(prefixTupleCountOff)) * slotManager.getSlotSize());
 
         int bytesRequired = tupleWriter.bytesRequired(tuple);
 
-        // see if the tuple would fit uncompressed
+        // See if the tuple would fit uncompressed.
         if (bytesRequired + slotManager.getSlotSize() <= freeContiguous)
             return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
 
-        // see if tuple would fit into remaining space after compaction
+        // See if tuple would fit into remaining space after compaction.
         if (bytesRequired + slotManager.getSlotSize() <= buf.getInt(totalFreeSpaceOff))
             return FrameOpSpaceStatus.SUFFICIENT_SPACE;
 
-        // see if the tuple matches a prefix and will fit after truncating the
-        // prefix
-        int prefixSlotNum = slotManager.findPrefix(tuple, framePrefixTuple, cmp);
+        // See if the tuple matches a prefix and will fit after truncating the prefix.
+        int prefixSlotNum = slotManager.findPrefix(tuple, framePrefixTuple);
         if (prefixSlotNum != FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
             int prefixSlotOff = slotManager.getPrefixSlotOff(prefixSlotNum);
             int prefixSlot = buf.getInt(prefixSlotOff);
@@ -259,11 +238,96 @@
     }
 
     @Override
-    public FrameOpSpaceStatus hasSpaceUpdate(int rid, ITupleReference tuple, MultiComparator cmp) {
-        // TODO Auto-generated method stub
+    public void insert(ITupleReference tuple, int tupleIndex) {
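+        // If the insert slot refers to an existing prefix, only the tuple's suffix fields are
+        // written; otherwise the tuple is stored uncompressed and the uncompressed-tuple count grows.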
+        int slot = slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
+        int prefixSlotNum = slotManager.decodeFirstSlotField(slot);
+        int numPrefixFields = 0;
+        if (prefixSlotNum != FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
+            int prefixSlotOff = slotManager.getPrefixSlotOff(prefixSlotNum);
+            int prefixSlot = buf.getInt(prefixSlotOff);
+            numPrefixFields = slotManager.decodeFirstSlotField(prefixSlot);
+        } else {
+            buf.putInt(uncompressedTupleCountOff, buf.getInt(uncompressedTupleCountOff) + 1);
+        }
+
+        int freeSpace = buf.getInt(freeSpaceOff);
+        int bytesWritten = tupleWriter.writeTupleFields(tuple, numPrefixFields,
+                tuple.getFieldCount() - numPrefixFields, buf, freeSpace);
+
+        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
+        buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
+        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
+    }
+    
+    @Override
+    public FrameOpSpaceStatus hasSpaceUpdate(ITupleReference newTuple, int oldTupleIndex) {
+        int tupleIndex = slotManager.decodeSecondSlotField(oldTupleIndex);
+        frameTuple.resetByTupleIndex(this, tupleIndex);
+        
+        int oldTupleBytes = 0;
+        int newTupleBytes = 0;
+        
+        int numPrefixFields = frameTuple.getNumPrefixFields();
+        int numFields = frameTuple.getFieldCount();
+        if (numPrefixFields != 0) {
+            // Check the space requirements for updating the suffix of the original tuple.            
+            oldTupleBytes = frameTuple.getSuffixTupleSize();
+            newTupleBytes = tupleWriter.bytesRequired(newTuple, numPrefixFields, numFields - numPrefixFields); 
+        } else {
+            // The original tuple is uncompressed.
+            oldTupleBytes = frameTuple.getTupleSize();
+            newTupleBytes = tupleWriter.bytesRequired(newTuple);
+        }
+        
+        int additionalBytesRequired = newTupleBytes - oldTupleBytes;
+        // Enough space for an in-place update?
+        if (additionalBytesRequired <= 0) {
+            return FrameOpSpaceStatus.SUFFICIENT_INPLACE_SPACE;
+        }
+        
+        int freeContiguous = buf.capacity() - buf.getInt(freeSpaceOff)
+                - ((buf.getInt(tupleCountOff) + buf.getInt(prefixTupleCountOff)) * slotManager.getSlotSize());
+        
+        // Enough space if we delete the old tuple and insert the new one without compaction? 
+        if (newTupleBytes <= freeContiguous) {
+            return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
+        }
+        // Enough space if we delete the old tuple and compact?
+        if (additionalBytesRequired <= buf.getInt(totalFreeSpaceOff)) {
+            return FrameOpSpaceStatus.SUFFICIENT_SPACE;
+        }
         return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
     }
 
+    @Override
+    public void update(ITupleReference newTuple, int oldTupleIndex, boolean inPlace) {
+        int tupleIndex = slotManager.decodeSecondSlotField(oldTupleIndex);
+        int tupleSlotOff = slotManager.getTupleSlotOff(tupleIndex);
+        int tupleSlot = buf.getInt(tupleSlotOff);
+        int prefixSlotNum = slotManager.decodeFirstSlotField(tupleSlot);
+        int suffixTupleStartOff = slotManager.decodeSecondSlotField(tupleSlot);                
+        
+        frameTuple.resetByTupleIndex(this, tupleIndex);
+        int numFields = frameTuple.getFieldCount();
+        int numPrefixFields = frameTuple.getNumPrefixFields();
+        int oldTupleBytes = frameTuple.getSuffixTupleSize();
+        int bytesWritten = 0;        
+        
+        if (inPlace) {
+            // Overwrite the old tuple suffix in place.
+            bytesWritten = tupleWriter.writeTupleFields(newTuple, numPrefixFields, numFields - numPrefixFields, buf, suffixTupleStartOff);
+        } else {
+            // Insert the new tuple suffix at the end of the free space, and change the slot value (effectively "deleting" the old tuple).
+            int newSuffixTupleStartOff = buf.getInt(freeSpaceOff);
+            bytesWritten = tupleWriter.writeTupleFields(newTuple, numPrefixFields, numFields - numPrefixFields, buf, newSuffixTupleStartOff);
+            // Update slot value using the same prefix slot num.
+            slotManager.setSlot(tupleSlotOff, slotManager.encodeSlotFields(prefixSlotNum, newSuffixTupleStartOff));
+            // Update contiguous free space pointer.
+            buf.putInt(freeSpaceOff, newSuffixTupleStartOff + bytesWritten);
+        }
+        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + oldTupleBytes - bytesWritten);
+    }
+    
     protected void resetSpaceParams() {
         buf.putInt(freeSpaceOff, getOrigFreeSpaceOff());
         buf.putInt(totalFreeSpaceOff, getOrigTotalFreeSpace());
@@ -271,7 +335,7 @@
 
     @Override
     public void initBuffer(byte level) {
-        buf.putInt(pageLsnOff, 0); // TODO: might to set to a different lsn
+        buf.putLong(pageLsnOff, 0);
         // during creation
         buf.putInt(tupleCountOff, 0);
         resetSpaceParams();
@@ -292,43 +356,55 @@
     }
 
     @Override
-    public int findTupleIndex(ITupleReference tuple, MultiComparator cmp) throws Exception {
-    	return slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.FTM_INCLUSIVE,
-    			FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
+    public int findInsertTupleIndex(ITupleReference tuple) throws TreeIndexException {
+        int slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.EXCLUSIVE_ERROR_IF_EXISTS,
+                FindTupleNoExactMatchPolicy.HIGHER_KEY);
+        int tupleIndex = slotManager.decodeSecondSlotField(slot);
+        // Error indicator is set if there is an exact match.
+        if (tupleIndex == slotManager.getErrorIndicator()) {
+            throw new BTreeDuplicateKeyException("Trying to insert duplicate key into leaf node.");
+        }
+        return slot;
     }
     
     @Override
-    public void insert(ITupleReference tuple, MultiComparator cmp, int tupleIndex) throws Exception {        
-        int slot = slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
-        int prefixSlotNum = slotManager.decodeFirstSlotField(slot);
-        int numPrefixFields = 0;
-        if (prefixSlotNum != FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
-            int prefixSlotOff = slotManager.getPrefixSlotOff(prefixSlotNum);
-            int prefixSlot = buf.getInt(prefixSlotOff);
-            numPrefixFields = slotManager.decodeFirstSlotField(prefixSlot);
-        } else {
-            buf.putInt(uncompressedTupleCountOff, buf.getInt(uncompressedTupleCountOff) + 1);
-        }
-
-        int freeSpace = buf.getInt(freeSpaceOff);
-        int bytesWritten = tupleWriter.writeTupleFields(tuple, numPrefixFields,
-                tuple.getFieldCount() - numPrefixFields, buf, freeSpace);
-
-        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
-        buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
-        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
+    public int findUpdateTupleIndex(ITupleReference tuple) throws TreeIndexException {
+        int slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.EXACT,
+                FindTupleNoExactMatchPolicy.HIGHER_KEY);
+        int tupleIndex = slotManager.decodeSecondSlotField(slot);
+        // Error indicator is set if there is no exact match.
+        if (tupleIndex == slotManager.getErrorIndicator()) {
+            throw new BTreeNonExistentKeyException("Trying to update a tuple with a nonexistent key in leaf node.");
+        }    
+        return slot;
     }
-
+    
     @Override
-    public void update(int rid, ITupleReference tuple) throws Exception {
-        // TODO Auto-generated method stub
-
+    public int findDeleteTupleIndex(ITupleReference tuple) throws TreeIndexException {
+        int slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.EXACT,
+                FindTupleNoExactMatchPolicy.HIGHER_KEY);
+        int tupleIndex = slotManager.decodeSecondSlotField(slot);
+        // Error indicator is set if there is no exact match.
+        if (tupleIndex == slotManager.getErrorIndicator()) {
+            throw new BTreeNonExistentKeyException("Trying to delete a tuple with a nonexistent key in leaf node.");
+        }    
+        return slot;
     }
-
+    
     @Override
-    public void printHeader() {
-        // TODO Auto-generated method stub
-
+    public String printHeader() {
+        StringBuilder strBuilder = new StringBuilder();
+        strBuilder.append("pageLsnOff:                " + pageLsnOff + "\n");
+        strBuilder.append("tupleCountOff:             " + tupleCountOff + "\n");
+        strBuilder.append("freeSpaceOff:              " + freeSpaceOff + "\n");
+        strBuilder.append("totalFreeSpaceOff:         " + totalFreeSpaceOff + "\n");
+        strBuilder.append("levelOff:                  " + levelOff + "\n");
+        strBuilder.append("smFlagOff:                 " + smFlagOff + "\n");
+        strBuilder.append("uncompressedTupleCountOff: " + uncompressedTupleCountOff + "\n");
+        strBuilder.append("prefixTupleCountOff:       " + prefixTupleCountOff + "\n");
+        strBuilder.append("prevLeafOff:               " + prevLeafOff + "\n");
+        strBuilder.append("nextLeafOff:               " + nextLeafOff + "\n");
+        return strBuilder.toString();
     }
 
     @Override
@@ -341,26 +417,6 @@
     }
 
     @Override
-    public String printKeys(MultiComparator cmp, ISerializerDeserializer[] fields) throws HyracksDataException {
-        StringBuilder strBuilder = new StringBuilder();
-        int tupleCount = buf.getInt(tupleCountOff);
-        frameTuple.setFieldCount(fields.length);
-        for (int i = 0; i < tupleCount; i++) {
-            frameTuple.resetByTupleIndex(this, i);
-            for (int j = 0; j < cmp.getKeyFieldCount(); j++) {
-                ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(j), frameTuple
-                        .getFieldStart(j), frameTuple.getFieldLength(j));
-                DataInput dataIn = new DataInputStream(inStream);
-                Object o = fields[j].deserialize(dataIn);
-                strBuilder.append(o.toString() + " ");
-            }
-            strBuilder.append(" | ");
-        }
-        strBuilder.append("\n");
-        return strBuilder.toString();
-    }
-
-    @Override
     public int getTupleOffset(int slotNum) {
         int tupleSlotOff = slotManager.getTupleSlotOff(slotNum);
         int tupleSlot = buf.getInt(tupleSlotOff);
@@ -368,13 +424,13 @@
     }
 
     @Override
-    public int getPageLsn() {
-        return buf.getInt(pageLsnOff);
+    public long getPageLsn() {
+        return buf.getLong(pageLsnOff);
     }
 
     @Override
-    public void setPageLsn(int pageLsn) {
-        buf.putInt(pageLsnOff, pageLsn);
+    public void setPageLsn(long pageLsn) {
+        buf.putLong(pageLsnOff, pageLsn);
     }
 
     @Override
@@ -386,7 +442,7 @@
     public boolean isLeaf() {
         return buf.get(levelOff) == 0;
     }
-    
+
     @Override
     public boolean isInterior() {
         return buf.get(levelOff) > 0;
@@ -424,7 +480,7 @@
     }
 
     @Override
-    public void insertSorted(ITupleReference tuple, MultiComparator cmp) throws HyracksDataException {
+    public void insertSorted(ITupleReference tuple) {
         int freeSpace = buf.getInt(freeSpaceOff);
         int fieldsToTruncate = 0;
 
@@ -445,7 +501,7 @@
             prefixSlotNum = buf.getInt(prefixTupleCountOff) - 1;
         else
             buf.putInt(uncompressedTupleCountOff, buf.getInt(uncompressedTupleCountOff) + 1);
-        int insSlot = slotManager.encodeSlotFields(prefixSlotNum, FieldPrefixSlotManager.GREATEST_SLOT);
+        int insSlot = slotManager.encodeSlotFields(prefixSlotNum, FieldPrefixSlotManager.GREATEST_KEY_INDICATOR);
         slotManager.insertSlot(insSlot, freeSpace);
 
         // update page metadata
@@ -455,17 +511,16 @@
     }
 
     @Override
-    public int split(ITreeIndexFrame rightFrame, ITupleReference tuple, MultiComparator cmp, ISplitKey splitKey)
-            throws Exception {
+    public void split(ITreeIndexFrame rightFrame, ITupleReference tuple, ISplitKey splitKey)
+            throws TreeIndexException {
 
-        BTreeFieldPrefixNSMLeafFrame rf = (BTreeFieldPrefixNSMLeafFrame) rightFrame;
+        BTreeFieldPrefixNSMLeafFrame rf = (BTreeFieldPrefixNSMLeafFrame)rightFrame;
 
-        frameTuple.setFieldCount(cmp.getFieldCount());
-        
         ByteBuffer right = rf.getBuffer();
         int tupleCount = getTupleCount();
         int prefixTupleCount = getPrefixTupleCount();
 
+        // Find the split point, and determine into which frame the new tuple should be inserted.
         int tuplesToLeft;
         int midSlotNum = tupleCount / 2;
         ITreeIndexFrame targetFrame = null;
@@ -522,7 +577,7 @@
 
                     int bytesWritten = 0;
                     if (lastPrefixSlotNum != prefixSlotNum) {
-                        bytesWritten = tupleWriter.writeTuple(framePrefixTuple, right, freeSpace);
+                        bytesWritten = tupleWriter.writeTuple(framePrefixTuple, right.array(), freeSpace);
                         int newPrefixSlot = rf.slotManager
                                 .encodeSlotFields(framePrefixTuple.getFieldCount(), freeSpace);
                         int prefixSlotOff = rf.slotManager.getPrefixSlotOff(prefixSlotNum);
@@ -567,12 +622,12 @@
         buf.putInt(prefixTupleCountOff, prefixesToLeft);
 
         // compact both pages
-        compact(cmp);
-        rightFrame.compact(cmp);
+        compact();
+        rightFrame.compact();
 
         // insert last key
-        int targetTupleIndex = targetFrame.findTupleIndex(tuple, cmp);
-        targetFrame.insert(tuple, cmp, targetTupleIndex);
+        int targetTupleIndex = ((IBTreeLeafFrame)targetFrame).findInsertTupleIndex(tuple);
+        targetFrame.insert(tuple, targetTupleIndex);
 
         // set split key to be highest value in left page
         frameTuple.resetByTupleIndex(this, getTupleCount() - 1);
@@ -581,8 +636,6 @@
         splitKey.initData(splitKeySize);
         tupleWriter.writeTupleFields(frameTuple, 0, cmp.getKeyFieldCount(), splitKey.getBuffer(), 0);
         splitKey.getTuple().resetByTupleOffset(splitKey.getBuffer(), 0);
-
-        return 0;
     }
 
     @Override
@@ -632,11 +685,6 @@
         return slotManager.getSlotSize();
     }
 
-    @Override
-    public void setPageTupleFieldCount(int fieldCount) {
-        frameTuple.setFieldCount(fieldCount);
-    }
-
     public ITreeIndexTupleWriter getTupleWriter() {
         return tupleWriter;
     }
@@ -651,14 +699,21 @@
             FindTupleMode ftm, FindTupleNoExactMatchPolicy ftp) {
         int slot = slotManager.findSlot(searchKey, pageTuple, framePrefixTuple, cmp, ftm, ftp);
         int tupleIndex = slotManager.decodeSecondSlotField(slot);
-        if (tupleIndex == FieldPrefixSlotManager.GREATEST_SLOT)
+        // TODO: Revisit this one. Maybe there is a cleaner way to solve this in the RangeSearchCursor.
+        if (tupleIndex == FieldPrefixSlotManager.GREATEST_KEY_INDICATOR || tupleIndex == FieldPrefixSlotManager.ERROR_INDICATOR)
             return -1;
         else
             return tupleIndex;
     }
 
     @Override
-	public int getPageHeaderSize() {
-		return nextLeafOff;
+    public int getPageHeaderSize() {
+        return nextLeafOff;
+    }
+
+	@Override
+	public void setMultiComparator(MultiComparator cmp) {
+		this.cmp = cmp;
+		this.slotManager.setMultiComparator(cmp);
 	}
 }
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrameFactory.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrameFactory.java
index 05b43d3..5defb27 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrameFactory.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrameFactory.java
@@ -22,8 +22,8 @@
 public class BTreeFieldPrefixNSMLeafFrameFactory implements ITreeIndexFrameFactory {
 
     private static final long serialVersionUID = 1L;
-    private ITreeIndexTupleWriterFactory tupleWriterFactory;
-
+    private final ITreeIndexTupleWriterFactory tupleWriterFactory;
+    
     public BTreeFieldPrefixNSMLeafFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory) {
         this.tupleWriterFactory = tupleWriterFactory;
     }
@@ -32,4 +32,9 @@
     public IBTreeLeafFrame createFrame() {
         return new BTreeFieldPrefixNSMLeafFrame(tupleWriterFactory.createTupleWriter());
     }
+
+    @Override
+    public ITreeIndexTupleWriterFactory getTupleWriterFactory() {
+        return tupleWriterFactory;
+    }
 }
\ No newline at end of file
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeLeafFrameType.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeLeafFrameType.java
new file mode 100644
index 0000000..6ff44be
--- /dev/null
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeLeafFrameType.java
@@ -0,0 +1,6 @@
+package edu.uci.ics.hyracks.storage.am.btree.frames;
+
+public enum BTreeLeafFrameType {
+    REGULAR_NSM,
+    FIELD_PREFIX_COMPRESSED_NSM
+}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrame.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrame.java
index 6025003..6173440 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrame.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrame.java
@@ -15,23 +15,19 @@
 
 package edu.uci.ics.hyracks.storage.am.btree.frames;
 
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Collections;
 
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
 import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeException;
 import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
 import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
 import edu.uci.ics.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
 import edu.uci.ics.hyracks.storage.am.common.frames.TreeIndexNSMFrame;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
@@ -42,20 +38,14 @@
 public class BTreeNSMInteriorFrame extends TreeIndexNSMFrame implements IBTreeInteriorFrame {
 
     private static final int rightLeafOff = smFlagOff + 1;
-
     private static final int childPtrSize = 4;
-    
-    // private SimpleTupleReference cmpFrameTuple = new SimpleTupleReference();
-    private ITreeIndexTupleReference cmpFrameTuple;
+
+    private final ITreeIndexTupleReference cmpFrameTuple;
+    private MultiComparator cmp;
 
     public BTreeNSMInteriorFrame(ITreeIndexTupleWriter tupleWriter) {
         super(tupleWriter, new OrderedSlotManager());
         cmpFrameTuple = tupleWriter.createTupleReference();
-
-    }
-
-    private int getLeftChildPageOff(ITupleReference tuple, MultiComparator cmp) {
-        return tuple.getFieldStart(cmp.getKeyFieldCount() - 1) + tuple.getFieldLength(cmp.getKeyFieldCount() - 1);
     }
 
     @Override
@@ -65,99 +55,138 @@
     }
 
     @Override
-    public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple, MultiComparator cmp) {
-        int bytesRequired = tupleWriter.bytesRequired(tuple) + 8; // for the two
-        // childpointers
-        if (bytesRequired + slotManager.getSlotSize() <= buf.capacity() - buf.getInt(freeSpaceOff)
-                - (buf.getInt(tupleCountOff) * slotManager.getSlotSize()))
-            return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
-        else if (bytesRequired + slotManager.getSlotSize() <= buf.getInt(totalFreeSpaceOff))
-            return FrameOpSpaceStatus.SUFFICIENT_SPACE;
-        else
-            return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
-    }
-
-    public int findTupleIndex(ITupleReference tuple, MultiComparator cmp) throws Exception {
-    	frameTuple.setFieldCount(cmp.getKeyFieldCount());
-        int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_INCLUSIVE,
-                FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
-        int slotOff = slotManager.getSlotOff(tupleIndex);
-        boolean isDuplicate = true;
-
-        if (tupleIndex < 0)
-            isDuplicate = false; // greater than all existing keys
-        else {
-            frameTuple.resetByTupleOffset(buf, slotManager.getTupleOff(slotOff));
-            if (cmp.compare(tuple, frameTuple) != 0)
-                isDuplicate = false;
-        }
-        if (isDuplicate) {
-            throw new BTreeException("Trying to insert duplicate value into interior node.");
-        }
-        return tupleIndex;
+    public int findInsertTupleIndex(ITupleReference tuple) throws TreeIndexException {
+        return slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.INCLUSIVE,
+                FindTupleNoExactMatchPolicy.HIGHER_KEY);
     }
     
     @Override
-    public void insert(ITupleReference tuple, MultiComparator cmp, int tupleIndex) throws Exception {
-    	int slotOff = slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
-    	int freeSpace = buf.getInt(freeSpaceOff);
-    	int bytesWritten = tupleWriter.writeTupleFields(tuple, 0, cmp.getKeyFieldCount(), buf, freeSpace);
-    	System.arraycopy(tuple.getFieldData(cmp.getKeyFieldCount() - 1), getLeftChildPageOff(tuple, cmp), buf
-    			.array(), freeSpace + bytesWritten, childPtrSize);
-    	int tupleSize = bytesWritten + childPtrSize;
-
-    	buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
-    	buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + tupleSize);
-    	buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - tupleSize - slotManager.getSlotSize());
-
-    	// did insert into the rightmost slot?
-    	if (slotOff == slotManager.getSlotEndOff()) {
-    		System.arraycopy(tuple.getFieldData(cmp.getKeyFieldCount() - 1), getLeftChildPageOff(tuple, cmp)
-    				+ childPtrSize, buf.array(), rightLeafOff, childPtrSize);
-    	} else {
-    		// if slotOff has a right (slot-)neighbor then update its child
-    		// pointer
-    		// the only time when this is NOT the case, is when this is the
-    		// first tuple
-    		// (or when the splitkey goes into the rightmost slot but that
-    		// case was handled in the if above)
-    		if (buf.getInt(tupleCountOff) > 1) {
-    			int rightNeighborOff = slotOff - slotManager.getSlotSize();
-    			frameTuple.resetByTupleOffset(buf, slotManager.getTupleOff(rightNeighborOff));
-    			System.arraycopy(tuple.getFieldData(0), getLeftChildPageOff(tuple, cmp) + childPtrSize,
-    					buf.array(), getLeftChildPageOff(frameTuple, cmp), childPtrSize);
-    		}
-    	}
+    public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple) {
+        // Tuple bytes + child pointer + slot.
+        int bytesRequired = tupleWriter.bytesRequired(tuple) + childPtrSize + slotManager.getSlotSize();
+        if (bytesRequired <= getFreeContiguousSpace()) {
+            return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
+        }
+        if (bytesRequired <= getTotalFreeSpace()) {
+            return FrameOpSpaceStatus.SUFFICIENT_SPACE;
+        }
+        return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
     }
 
-
     @Override
-    public void insertSorted(ITupleReference tuple, MultiComparator cmp) throws HyracksDataException {
+    public void insert(ITupleReference tuple, int tupleIndex) {
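+        // The inserted tuple carries two child pointers after its key fields: the left pointer is
+        // stored with the key, and the right pointer becomes the frame's rightmost pointer or
+        // updates the right slot-neighbor's left pointer.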
+        int slotOff = slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
         int freeSpace = buf.getInt(freeSpaceOff);
-        slotManager.insertSlot(-1, freeSpace);
-        int bytesWritten = tupleWriter.writeTupleFields(tuple, 0, cmp.getKeyFieldCount(), buf, freeSpace);
-        System.arraycopy(tuple.getFieldData(cmp.getKeyFieldCount() - 1), getLeftChildPageOff(tuple, cmp), buf.array(),
+        int bytesWritten = tupleWriter.writeTupleFields(tuple, 0, tuple.getFieldCount(), buf, freeSpace);
+        System.arraycopy(tuple.getFieldData(tuple.getFieldCount() - 1), getLeftChildPageOff(tuple), buf.array(),
                 freeSpace + bytesWritten, childPtrSize);
         int tupleSize = bytesWritten + childPtrSize;
         buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
         buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + tupleSize);
         buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - tupleSize - slotManager.getSlotSize());
-        System.arraycopy(tuple.getFieldData(0), getLeftChildPageOff(tuple, cmp) + childPtrSize, buf.array(),
-                rightLeafOff, childPtrSize);
+        // Did we insert into the rightmost slot?
+        if (slotOff == slotManager.getSlotEndOff()) {
+            System.arraycopy(tuple.getFieldData(tuple.getFieldCount() - 1), getLeftChildPageOff(tuple) + childPtrSize,
+                    buf.array(), rightLeafOff, childPtrSize);
+        } else {
+            // If slotOff has a right (slot-)neighbor then update its child pointer.
+            // The only time this is NOT the case is when this is the very first tuple
+            // (or when the splitkey goes into the rightmost slot, but that case is
+            // handled in the if above).
+            if (buf.getInt(tupleCountOff) > 1) {
+                int rightNeighborOff = slotOff - slotManager.getSlotSize();
+                frameTuple.resetByTupleOffset(buf, slotManager.getTupleOff(rightNeighborOff));
+                System.arraycopy(tuple.getFieldData(0), getLeftChildPageOff(tuple) + childPtrSize, buf.array(),
+                        getLeftChildPageOff(frameTuple), childPtrSize);
+            }
+        }
+    }
+    
+    @Override
+    public int findDeleteTupleIndex(ITupleReference tuple) throws TreeIndexException {
+        return slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.INCLUSIVE,
+                FindTupleNoExactMatchPolicy.HIGHER_KEY);
     }
 
     @Override
-    public int split(ITreeIndexFrame rightFrame, ITupleReference tuple, MultiComparator cmp, ISplitKey splitKey)
-            throws Exception {
-        // before doing anything check if key already exists
-        frameTuple.setFieldCount(cmp.getKeyFieldCount());
-        
-        ByteBuffer right = rightFrame.getBuffer();
-        int tupleCount = buf.getInt(tupleCountOff);
+    public void delete(ITupleReference tuple, int tupleIndex) {
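+        // A greatest-key indicator means the entry to delete lies beyond all keys in this frame:
+        // the greatest key is dropped and its left child pointer becomes the new rightmost pointer.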
+        int slotOff = slotManager.getSlotOff(tupleIndex);
+        int tupleOff;
+        int keySize;
+        if (tupleIndex == slotManager.getGreatestKeyIndicator()) {
+            tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
+            frameTuple.resetByTupleOffset(buf, tupleOff);
+            keySize = frameTuple.getTupleSize();
+            // Copy new rightmost pointer.
+            System.arraycopy(buf.array(), tupleOff + keySize, buf.array(), rightLeafOff, childPtrSize);
+        } else {
+            tupleOff = slotManager.getTupleOff(slotOff);
+            frameTuple.resetByTupleOffset(buf, tupleOff);
+            keySize = frameTuple.getTupleSize();
+            // Perform deletion (we just do a memcpy to overwrite the slot).
+            int slotStartOff = slotManager.getSlotEndOff();
+            int length = slotOff - slotStartOff;
+            System.arraycopy(buf.array(), slotStartOff, buf.array(), slotStartOff + slotManager.getSlotSize(), length);
+        }
+        // Maintain space information.
+        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
+        buf.putInt(totalFreeSpaceOff,
+                buf.getInt(totalFreeSpaceOff) + keySize + childPtrSize + slotManager.getSlotSize());
+    }
+    
+    @Override
+    public void deleteGreatest() {
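+        // Drop the greatest key in the frame and promote its left child pointer to be the
+        // frame's rightmost child pointer.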
+        int slotOff = slotManager.getSlotEndOff();
+        int tupleOff = slotManager.getTupleOff(slotOff);
+        frameTuple.resetByTupleOffset(buf, tupleOff);
+        int keySize = tupleWriter.bytesRequired(frameTuple);
+        System.arraycopy(buf.array(), tupleOff + keySize, buf.array(), rightLeafOff, childPtrSize);
+        // Maintain space information.
+        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
+        buf.putInt(totalFreeSpaceOff,
+                buf.getInt(totalFreeSpaceOff) + keySize + childPtrSize + slotManager.getSlotSize());
+        // If the deleted entry was the last one written, also roll back the contiguous free space pointer.
+        int freeSpace = buf.getInt(freeSpaceOff);
+        if (freeSpace == tupleOff + keySize + childPtrSize) {
+            buf.putInt(freeSpaceOff, freeSpace - (keySize + childPtrSize));
+        }
+    }
+    
+    @Override
+    public FrameOpSpaceStatus hasSpaceUpdate(ITupleReference tuple, int oldTupleIndex) {
+        throw new UnsupportedOperationException("Cannot update tuples in interior node.");
+    }
+    
+    @Override
+    public int findUpdateTupleIndex(ITupleReference tuple) throws TreeIndexException {
+        throw new UnsupportedOperationException("Cannot update tuples in interior node.");
+    }
 
+    @Override
+    public void insertSorted(ITupleReference tuple) {
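+        // For tuples arriving in sorted order (e.g., during bulk load): the tuple is appended as
+        // the new greatest key, and its right child pointer becomes the frame's rightmost pointer.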
+        int freeSpace = buf.getInt(freeSpaceOff);
+        slotManager.insertSlot(slotManager.getGreatestKeyIndicator(), freeSpace);
+        int bytesWritten = tupleWriter.writeTuple(tuple, buf, freeSpace);
+        System.arraycopy(tuple.getFieldData(tuple.getFieldCount() - 1), getLeftChildPageOff(tuple), buf.array(),
+                freeSpace + bytesWritten, childPtrSize);
+        int tupleSize = bytesWritten + childPtrSize;
+        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
+        buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + tupleSize);
+        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - tupleSize - slotManager.getSlotSize());
+        System.arraycopy(tuple.getFieldData(0), getLeftChildPageOff(tuple) + childPtrSize, buf.array(), rightLeafOff,
+                childPtrSize);
+    }
+
+    @Override
+    public void split(ITreeIndexFrame rightFrame, ITupleReference tuple, ISplitKey splitKey) throws TreeIndexException {
+        ByteBuffer right = rightFrame.getBuffer();
+        int tupleCount = getTupleCount();
+        
+        // Find the split point, and determine into which frame the new tuple should be inserted.
         int tuplesToLeft = (tupleCount / 2) + (tupleCount % 2);
         ITreeIndexFrame targetFrame = null;
-        frameTuple.resetByTupleOffset(buf, getTupleOffset(tuplesToLeft - 1));
+        frameTuple.resetByTupleIndex(this, tuplesToLeft - 1);
         if (cmp.compare(tuple, frameTuple) <= 0) {
             targetFrame = this;
         } else {
@@ -165,10 +194,10 @@
         }
         int tuplesToRight = tupleCount - tuplesToLeft;
 
-        // copy entire page
+        // Copy entire page.
         System.arraycopy(buf.array(), 0, right.array(), 0, buf.capacity());
 
-        // on right page we need to copy rightmost slots to left
+        // On the right page we need to copy rightmost slots to left.
         int src = rightFrame.getSlotManager().getSlotEndOff();
         int dest = rightFrame.getSlotManager().getSlotEndOff() + tuplesToLeft
                 * rightFrame.getSlotManager().getSlotSize();
@@ -176,48 +205,44 @@
         System.arraycopy(right.array(), src, right.array(), dest, length);
         right.putInt(tupleCountOff, tuplesToRight);
 
-        // on left page, remove highest key and make its childpointer the
-        // rightmost childpointer
+        // On the left page, remove the highest key and make its child pointer
+        // the rightmost child pointer.
         buf.putInt(tupleCountOff, tuplesToLeft);
 
-        // copy data to be inserted, we need this because creating the splitkey
-        // will overwrite the data param (data points to same memory as
-        // splitKey.getData())
+        // Copy the split key to be inserted.
+        // We must do so because setting the new split key will overwrite the
+        // old split key, and we cannot insert the existing split key at this point.
         ISplitKey savedSplitKey = splitKey.duplicate(tupleWriter.createTupleReference());
 
-        // set split key to be highest value in left page
+        // Set split key to be highest value in left page.
         int tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
         frameTuple.resetByTupleOffset(buf, tupleOff);
         int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
         splitKey.initData(splitKeySize);
-        tupleWriter.writeTupleFields(frameTuple, 0, cmp.getKeyFieldCount(), splitKey.getBuffer(), 0);
+        tupleWriter.writeTuple(frameTuple, splitKey.getBuffer(), 0);
         splitKey.getTuple().resetByTupleOffset(splitKey.getBuffer(), 0);
 
         int deleteTupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
         frameTuple.resetByTupleOffset(buf, deleteTupleOff);
-        buf.putInt(rightLeafOff, buf.getInt(getLeftChildPageOff(frameTuple, cmp)));
+        buf.putInt(rightLeafOff, buf.getInt(getLeftChildPageOff(frameTuple)));
         buf.putInt(tupleCountOff, tuplesToLeft - 1);
 
-        // compact both pages
-        rightFrame.compact(cmp);
-        compact(cmp);
+        // Compact both pages.
+        rightFrame.compact();
+        compact();
 
-        // insert key
-        int targetTupleIndex = targetFrame.findTupleIndex(savedSplitKey.getTuple(), cmp);
-        targetFrame.insert(savedSplitKey.getTuple(), cmp, targetTupleIndex);
-        
-        return 0;
+        // Insert the saved split key.
+        int targetTupleIndex = ((BTreeNSMInteriorFrame) targetFrame)
+                .findInsertTupleIndex(savedSplitKey.getTuple());
+        targetFrame.insert(savedSplitKey.getTuple(), targetTupleIndex);
     }
 
     @Override
-    public boolean compact(MultiComparator cmp) {
+    public boolean compact() {
         resetSpaceParams();
-
-        frameTuple.setFieldCount(cmp.getKeyFieldCount());
-
         int tupleCount = buf.getInt(tupleCountOff);
         int freeSpace = buf.getInt(freeSpaceOff);
-
+        // Sort the slots by the tuple offset they point to.
         ArrayList<SlotOffTupleOff> sortedTupleOffs = new ArrayList<SlotOffTupleOff>();
         sortedTupleOffs.ensureCapacity(tupleCount);
         for (int i = 0; i < tupleCount; i++) {
@@ -226,133 +251,109 @@
             sortedTupleOffs.add(new SlotOffTupleOff(i, slotOff, tupleOff));
         }
         Collections.sort(sortedTupleOffs);
-
+        // Iterate over the sorted slots, and move their corresponding tuples to
+        // the left, reclaiming free space.
         for (int i = 0; i < sortedTupleOffs.size(); i++) {
             int tupleOff = sortedTupleOffs.get(i).tupleOff;
             frameTuple.resetByTupleOffset(buf, tupleOff);
-
             int tupleEndOff = frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
                     + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1);
             int tupleLength = tupleEndOff - tupleOff + childPtrSize;
             System.arraycopy(buf.array(), tupleOff, buf.array(), freeSpace, tupleLength);
-
             slotManager.setSlot(sortedTupleOffs.get(i).slotOff, freeSpace);
             freeSpace += tupleLength;
         }
-
+        // Update contiguous free space pointer and total free space indicator.
         buf.putInt(freeSpaceOff, freeSpace);
         buf.putInt(totalFreeSpaceOff, buf.capacity() - freeSpace - tupleCount * slotManager.getSlotSize());
-        
         return false;
     }
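A note on compact() above: the method sorts the slots by the tuple offset each one points to, then slides every tuple down to the current free-space frontier, so the slots keep their logical order while the gaps between tuples disappear. The following self-contained sketch is only an analogy (hypothetical CompactSketch class, a plain byte[] page with fixed-length two-byte "tuples"); it ignores the child pointers and header fields the real frame maintains.

    import java.util.ArrayList;
    import java.util.Comparator;
    import java.util.List;

    // Analogy for compact(): tuples sit at arbitrary offsets with holes in between;
    // sorting the slots by offset and sliding each tuple to the free-space frontier
    // removes the holes without changing slot order.
    public class CompactSketch {
        public static void main(String[] args) {
            byte[] page = new byte[16];
            // Three two-byte "tuples" at offsets 4, 10 and 1 (slot order = key order).
            int[] slotOffsets = { 4, 10, 1 };
            for (int off : slotOffsets) {
                page[off] = 'x';
                page[off + 1] = 'y';
            }
            int tupleLength = 2;

            // Sort {slotIndex, tupleOffset} pairs by tuple offset.
            List<int[]> sortedTupleOffs = new ArrayList<>();
            for (int i = 0; i < slotOffsets.length; i++) {
                sortedTupleOffs.add(new int[] { i, slotOffsets[i] });
            }
            sortedTupleOffs.sort(Comparator.comparingInt(a -> a[1]));

            // Slide each tuple to the frontier and re-point its slot.
            int freeSpace = 0;
            for (int[] s : sortedTupleOffs) {
                System.arraycopy(page, s[1], page, freeSpace, tupleLength);
                slotOffsets[s[0]] = freeSpace;
                freeSpace += tupleLength;
            }
            System.out.println("contiguous free space starts at " + freeSpace); // 6
        }
    }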
 
     @Override
-    public int getChildPageId(RangePredicate pred, MultiComparator srcCmp) {
-        // check for trivial case where there is only a child pointer (and no
-        // key)
+    public int getChildPageId(RangePredicate pred) {
+        // Trivial case where there is only a child pointer (and no key).
         if (buf.getInt(tupleCountOff) == 0) {
             return buf.getInt(rightLeafOff);
         }
-
-        cmpFrameTuple.setFieldCount(srcCmp.getKeyFieldCount());
-        frameTuple.setFieldCount(srcCmp.getKeyFieldCount());
-
-        // check for trivial cases where no low key or high key exists (e.g.
-        // during an index scan)
+        // Trivial cases where no low key or high key was given (e.g.
+        // during an index scan).
         ITupleReference tuple = null;
         FindTupleMode fsm = null;
+        // The target comparator may be on a prefix of the BTree key fields.
         MultiComparator targetCmp = null;
-        if (pred.isForward()) {
+        if (pred.isForward()) {            
             tuple = pred.getLowKey();
             if (tuple == null) {
-                return getLeftmostChildPageId(srcCmp);
+                return getLeftmostChildPageId();
             }
-            if (pred.isLowKeyInclusive())
-                fsm = FindTupleMode.FTM_INCLUSIVE;
-            else
-                fsm = FindTupleMode.FTM_EXCLUSIVE;
+            if (pred.isLowKeyInclusive()) {
+                fsm = FindTupleMode.INCLUSIVE;
+            } else {
+                fsm = FindTupleMode.EXCLUSIVE;
+            }
             targetCmp = pred.getLowKeyComparator();
         } else {
             tuple = pred.getHighKey();
             if (tuple == null) {
-                return getRightmostChildPageId(srcCmp);
+                return getRightmostChildPageId();
             }
-            if (pred.isHighKeyInclusive())
-                fsm = FindTupleMode.FTM_EXCLUSIVE;
-            else
-                fsm = FindTupleMode.FTM_INCLUSIVE;
+            if (pred.isHighKeyInclusive()) {
+                fsm = FindTupleMode.EXCLUSIVE;
+            } else {
+                fsm = FindTupleMode.INCLUSIVE;
+            }
             targetCmp = pred.getHighKeyComparator();
         }
-
+        // Search for a matching key.
         int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, targetCmp, fsm,
-                FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
+                FindTupleNoExactMatchPolicy.HIGHER_KEY);
         int slotOff = slotManager.getSlotOff(tupleIndex);
-        if (tupleIndex < 0) {
+        // Follow the rightmost (greatest) child pointer.
+        if (tupleIndex == slotManager.getGreatestKeyIndicator()) {
             return buf.getInt(rightLeafOff);
-        } else {
-            int origTupleOff = slotManager.getTupleOff(slotOff);
-            cmpFrameTuple.resetByTupleOffset(buf, origTupleOff);
-            int cmpTupleOff = origTupleOff;
-            if (pred.isForward()) {
-                int maxSlotOff = buf.capacity();
-                slotOff += slotManager.getSlotSize();
-                while (slotOff < maxSlotOff) {
-                    cmpTupleOff = slotManager.getTupleOff(slotOff);
-                    frameTuple.resetByTupleOffset(buf, cmpTupleOff);
-                    if (targetCmp.compare(cmpFrameTuple, frameTuple) != 0)
-                        break;
-                    slotOff += slotManager.getSlotSize();
-                }
-                slotOff -= slotManager.getSlotSize();
-            } else {
-                int minSlotOff = slotManager.getSlotEndOff() - slotManager.getSlotSize();
-                slotOff -= slotManager.getSlotSize();
-                while (slotOff > minSlotOff) {
-                    cmpTupleOff = slotManager.getTupleOff(slotOff);
-                    frameTuple.resetByTupleOffset(buf, cmpTupleOff);
-                    if (targetCmp.compare(cmpFrameTuple, frameTuple) != 0)
-                        break;
-                    slotOff -= slotManager.getSlotSize();
+        }
+        // Deal with prefix searches.
+        // slotManager.findTupleIndex() will return an arbitrary tuple matching
+        // the given field prefix (according to the target comparator).
+        // To make sure we traverse the right path, we must find the
+        // leftmost or rightmost tuple that matches the prefix.
+        int origTupleOff = slotManager.getTupleOff(slotOff);
+        cmpFrameTuple.resetByTupleOffset(buf, origTupleOff);
+        int cmpTupleOff = origTupleOff;
+        if (pred.isForward()) {
+            // The answer set begins with the lowest key matching the prefix.
+            // We must follow the child pointer of the lowest (leftmost) key
+            // matching the given prefix.
+            int maxSlotOff = buf.capacity();
+            slotOff += slotManager.getSlotSize();
+            while (slotOff < maxSlotOff) {
+                cmpTupleOff = slotManager.getTupleOff(slotOff);
+                frameTuple.resetByTupleOffset(buf, cmpTupleOff);
+                if (targetCmp.compare(cmpFrameTuple, frameTuple) != 0) {
+                    break;
                 }
                 slotOff += slotManager.getSlotSize();
             }
-            
-            frameTuple.resetByTupleOffset(buf, slotManager.getTupleOff(slotOff));
-            int childPageOff = getLeftChildPageOff(frameTuple, srcCmp);
-            return buf.getInt(childPageOff);
-        }
-    }
-
-    @Override
-    public void delete(ITupleReference tuple, MultiComparator cmp, boolean exactDelete) throws Exception {
-        frameTuple.setFieldCount(cmp.getKeyFieldCount());
-        int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_INCLUSIVE,
-                FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
-        int slotOff = slotManager.getSlotOff(tupleIndex);
-        int tupleOff;
-        int keySize;
-
-        if (tupleIndex < 0) {
-            tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
-            frameTuple.resetByTupleOffset(buf, tupleOff);
-            keySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
-
-            // copy new rightmost pointer
-            System.arraycopy(buf.array(), tupleOff + keySize, buf.array(), rightLeafOff, childPtrSize);
+            slotOff -= slotManager.getSlotSize();
         } else {
-            tupleOff = slotManager.getTupleOff(slotOff);
-            frameTuple.resetByTupleOffset(buf, tupleOff);
-            keySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
-            // perform deletion (we just do a memcpy to overwrite the slot)
-            int slotStartOff = slotManager.getSlotEndOff();
-            int length = slotOff - slotStartOff;
-            System.arraycopy(buf.array(), slotStartOff, buf.array(), slotStartOff + slotManager.getSlotSize(), length);
+            // The answer set begins with the highest key matching the prefix.
+            // We must follow the child pointer of the highest (rightmost) key
+            // matching the given prefix.
+            int minSlotOff = slotManager.getSlotEndOff() - slotManager.getSlotSize();
+            slotOff -= slotManager.getSlotSize();
+            while (slotOff > minSlotOff) {
+                cmpTupleOff = slotManager.getTupleOff(slotOff);
+                frameTuple.resetByTupleOffset(buf, cmpTupleOff);
+                if (targetCmp.compare(cmpFrameTuple, frameTuple) != 0) {
+                    break;
+                }
+                slotOff -= slotManager.getSlotSize();
+            }
+            slotOff += slotManager.getSlotSize();
         }
-
-        // maintain space information
-        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
-        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + keySize + childPtrSize
-                + slotManager.getSlotSize());
+        frameTuple.resetByTupleOffset(buf, slotManager.getTupleOff(slotOff));
+        int childPageOff = getLeftChildPageOff(frameTuple);
+        return buf.getInt(childPageOff);
     }
 
     @Override
@@ -362,16 +363,15 @@
     }
 
     @Override
-    public int getLeftmostChildPageId(MultiComparator cmp) {
+    public int getLeftmostChildPageId() {
         int tupleOff = slotManager.getTupleOff(slotManager.getSlotStartOff());
-        frameTuple.setFieldCount(cmp.getKeyFieldCount());
         frameTuple.resetByTupleOffset(buf, tupleOff);
-        int childPageOff = getLeftChildPageOff(frameTuple, cmp);
+        int childPageOff = getLeftChildPageOff(frameTuple);
         return buf.getInt(childPageOff);
     }
 
     @Override
-    public int getRightmostChildPageId(MultiComparator cmp) {
+    public int getRightmostChildPageId() {
         return buf.getInt(rightLeafOff);
     }
 
@@ -380,7 +380,44 @@
         buf.putInt(rightLeafOff, pageId);
     }
 
-    // for debugging
+    @Override
+    public int getPageHeaderSize() {
+        return rightLeafOff;
+    }
+
+    private int getLeftChildPageOff(ITupleReference tuple) {
+        return tuple.getFieldStart(tuple.getFieldCount() - 1) + tuple.getFieldLength(tuple.getFieldCount() - 1);
+    }
+
+    @Override
+    public boolean getSmFlag() {
+        return buf.get(smFlagOff) != 0;
+    }
+
+    @Override
+    public void setSmFlag(boolean smFlag) {
+        if (smFlag) {
+            buf.put(smFlagOff, (byte) 1);
+        } else {
+            buf.put(smFlagOff, (byte) 0);
+        }
+    }
+
+    @Override
+    public void setMultiComparator(MultiComparator cmp) {
+        this.cmp = cmp;
+        cmpFrameTuple.setFieldCount(cmp.getKeyFieldCount());
+        frameTuple.setFieldCount(cmp.getKeyFieldCount());
+    }
+    
+    @Override
+    public ITreeIndexTupleReference createTupleReference() {
+        ITreeIndexTupleReference tuple = tupleWriter.createTupleReference();
+        tuple.setFieldCount(cmp.getKeyFieldCount());
+        return tuple;
+    }
+    
+    // For debugging.
     public ArrayList<Integer> getChildren(MultiComparator cmp) {
         ArrayList<Integer> ret = new ArrayList<Integer>();
         frameTuple.setFieldCount(cmp.getKeyFieldCount());
@@ -388,8 +425,10 @@
         for (int i = 0; i < tupleCount; i++) {
             int tupleOff = slotManager.getTupleOff(slotManager.getSlotOff(i));
             frameTuple.resetByTupleOffset(buf, tupleOff);
-            int intVal = getInt(buf.array(), frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
-                    + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1));
+            int intVal = IntegerSerializerDeserializer.getInt(
+                    buf.array(),
+                    frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
+                            + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1));
             ret.add(intVal);
         }
         if (!isLeaf()) {
@@ -399,54 +438,4 @@
         }
         return ret;
     }
-
-    @Override
-    public void deleteGreatest(MultiComparator cmp) {
-        int slotOff = slotManager.getSlotEndOff();
-        int tupleOff = slotManager.getTupleOff(slotOff);
-        frameTuple.setFieldCount(cmp.getKeyFieldCount());
-        frameTuple.resetByTupleOffset(buf, tupleOff);
-        int keySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
-        System.arraycopy(buf.array(), tupleOff + keySize, buf.array(), rightLeafOff, childPtrSize);
-
-        // maintain space information
-        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
-        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + keySize + childPtrSize
-                + slotManager.getSlotSize());
-
-        int freeSpace = buf.getInt(freeSpaceOff);
-        if (freeSpace == tupleOff + keySize + childPtrSize) {
-            buf.putInt(freeSpace, freeSpace - (keySize + childPtrSize));
-        }
-    }
-
-    private int getInt(byte[] bytes, int offset) {
-        return ((bytes[offset] & 0xff) << 24) + ((bytes[offset + 1] & 0xff) << 16) + ((bytes[offset + 2] & 0xff) << 8)
-                + ((bytes[offset + 3] & 0xff) << 0);
-    }
-
-    @Override
-    public String printKeys(MultiComparator cmp, ISerializerDeserializer[] fields) throws HyracksDataException {
-        StringBuilder strBuilder = new StringBuilder();
-        int tupleCount = buf.getInt(tupleCountOff);
-        frameTuple.setFieldCount(cmp.getKeyFieldCount());
-        for (int i = 0; i < tupleCount; i++) {
-            frameTuple.resetByTupleIndex(this, i);
-            for (int j = 0; j < cmp.getKeyFieldCount(); j++) {
-                ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(j), frameTuple
-                        .getFieldStart(j), frameTuple.getFieldLength(j));
-                DataInput dataIn = new DataInputStream(inStream);
-                Object o = fields[j].deserialize(dataIn);
-                strBuilder.append(o.toString() + " ");
-            }
-            strBuilder.append(" | ");
-        }
-        strBuilder.append("\n");
-        return strBuilder.toString();
-    }
-    
-    @Override
-	public int getPageHeaderSize() {
-		return rightLeafOff;
-	}
 }
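A note on the prefix handling in getChildPageId() above: when the search key covers only a prefix of the page's key fields, the binary search may land on any one of several entries that compare equal under that prefix, so the code walks adjacent slots to the leftmost match (forward search) or rightmost match (backward search) before following a child pointer. The sketch below shows the same idea on a plain sorted String array; it is an analogy only (hypothetical PrefixChildSketch class, string prefixes standing in for key-field prefixes), not the Hyracks slot-manager API.

    // Illustrative sketch: pick the boundary element matching a prefix.
    public class PrefixChildSketch {
        // Binary search that stops at *any* element matching the prefix.
        static int anyMatch(String[] sortedKeys, String prefix) {
            int lo = 0, hi = sortedKeys.length - 1;
            while (lo <= hi) {
                int mid = (lo + hi) >>> 1;
                if (sortedKeys[mid].startsWith(prefix)) {
                    return mid;
                }
                if (sortedKeys[mid].compareTo(prefix) < 0) {
                    lo = mid + 1;
                } else {
                    hi = mid - 1;
                }
            }
            return -1; // No match: a real interior frame would follow the rightmost child pointer.
        }

        // Walk linearly to the boundary, mirroring the slotOff loops in getChildPageId().
        static int boundaryMatch(String[] sortedKeys, String prefix, boolean forward) {
            int i = anyMatch(sortedKeys, prefix);
            if (i < 0) {
                return -1;
            }
            int step = forward ? -1 : 1; // forward searches want the leftmost match
            while (i + step >= 0 && i + step < sortedKeys.length
                    && sortedKeys[i + step].startsWith(prefix)) {
                i += step;
            }
            return i;
        }

        public static void main(String[] args) {
            String[] keys = { "aa", "ab", "ba", "bb", "bc", "ca" };
            System.out.println(boundaryMatch(keys, "b", true));  // 2: leftmost key with prefix "b"
            System.out.println(boundaryMatch(keys, "b", false)); // 4: rightmost key with prefix "b"
        }
    }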
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrameFactory.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrameFactory.java
index 6b30ee0..8618df8 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrameFactory.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrameFactory.java
@@ -22,7 +22,7 @@
 public class BTreeNSMInteriorFrameFactory implements ITreeIndexFrameFactory {
 
     private static final long serialVersionUID = 1L;
-    private ITreeIndexTupleWriterFactory tupleWriterFactory;
+    private final ITreeIndexTupleWriterFactory tupleWriterFactory;
 
     public BTreeNSMInteriorFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory) {
         this.tupleWriterFactory = tupleWriterFactory;
@@ -32,4 +32,9 @@
     public IBTreeInteriorFrame createFrame() {
         return new BTreeNSMInteriorFrame(tupleWriterFactory.createTupleWriter());
     }
+
+    @Override
+    public ITreeIndexTupleWriterFactory getTupleWriterFactory() {
+        return tupleWriterFactory;
+    }
 }
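The change above (and the matching one for the leaf frame factory further below) only finalizes the field and adds a getTupleWriterFactory() accessor, but it is a good place to note the pattern: the factories carry a serialVersionUID, presumably so a serialized job description can ship them to the nodes, while the frames themselves are created locally per use site and are never shared. A minimal, hypothetical sketch of that pattern (Frame, FrameFactory and the Simple* names are invented, not the Hyracks interfaces):

    import java.io.Serializable;

    // Cheap, serializable factory producing fresh, non-shared frame instances on demand.
    interface Frame {
    }

    interface FrameFactory extends Serializable {
        Frame createFrame();
    }

    class SimpleFrame implements Frame {
    }

    class SimpleFrameFactory implements FrameFactory {
        private static final long serialVersionUID = 1L;

        @Override
        public Frame createFrame() {
            // Each caller gets its own instance, so frames never need to be thread-safe.
            return new SimpleFrame();
        }
    }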
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrame.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrame.java
index 85fbec9..4856595 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrame.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrame.java
@@ -17,14 +17,15 @@
 
 import java.nio.ByteBuffer;
 
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
 import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeException;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeDuplicateKeyException;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeNonExistentKeyException;
 import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
 import edu.uci.ics.hyracks.storage.am.common.frames.TreeIndexNSMFrame;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
@@ -33,7 +34,8 @@
 public class BTreeNSMLeafFrame extends TreeIndexNSMFrame implements IBTreeLeafFrame {
     protected static final int prevLeafOff = smFlagOff + 1;
     protected static final int nextLeafOff = prevLeafOff + 4;
-
+    private MultiComparator cmp;
+    
     public BTreeNSMLeafFrame(ITreeIndexTupleWriter tupleWriter) {
         super(tupleWriter, new OrderedSlotManager());
     }
@@ -66,56 +68,59 @@
     }
 
     @Override
-    public int findTupleIndex(ITupleReference tuple, MultiComparator cmp) throws Exception {
-    	frameTuple.setFieldCount(cmp.getFieldCount());
-        int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_INCLUSIVE,
-                FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
-        int slotOff = slotManager.getSlotOff(tupleIndex);
-        boolean isDuplicate = true;
-
-        if (tupleIndex < 0)
-            isDuplicate = false; // greater than all existing keys
-        else {
-            frameTuple.resetByTupleOffset(buf, slotManager.getTupleOff(slotOff));
-            if (cmp.compare(tuple, frameTuple) != 0)
-                isDuplicate = false;
+    public int findInsertTupleIndex(ITupleReference tuple) throws TreeIndexException {
+        int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.EXCLUSIVE_ERROR_IF_EXISTS,
+                FindTupleNoExactMatchPolicy.HIGHER_KEY);
+        // Error indicator is set if there is an exact match.
+        if (tupleIndex == slotManager.getErrorIndicator()) {
+            throw new BTreeDuplicateKeyException("Trying to insert duplicate key into leaf node.");
         }
-        if (isDuplicate) {
-            throw new BTreeException("Trying to insert duplicate value into leaf of unique index");
-        }
-        
         return tupleIndex;
     }
     
     @Override
-    public void insert(ITupleReference tuple, MultiComparator cmp, int tupleIndex) throws Exception {    	
-    	slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
-    	int freeSpace = buf.getInt(freeSpaceOff);
-    	int bytesWritten = tupleWriter.writeTuple(tuple, buf, freeSpace);
-    	buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
-    	buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
-    	buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
+    public int findUpdateTupleIndex(ITupleReference tuple) throws TreeIndexException {
+        int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.EXACT,
+                FindTupleNoExactMatchPolicy.HIGHER_KEY);
+        // Error indicator is set if there is no exact match.
+        if (tupleIndex == slotManager.getErrorIndicator()) {
+            throw new BTreeNonExistentKeyException("Trying to update a tuple with a nonexistent key in leaf node.");
+        }        
+        return tupleIndex;
+    }
+    
+    @Override
+    public int findDeleteTupleIndex(ITupleReference tuple) throws TreeIndexException {
+        int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.EXACT,
+                FindTupleNoExactMatchPolicy.HIGHER_KEY);
+        // Error indicator is set if there is no exact match.
+        if (tupleIndex == slotManager.getErrorIndicator()) {
+            throw new BTreeNonExistentKeyException("Trying to delete a tuple with a nonexistent key in leaf node.");
+        }        
+        return tupleIndex;
     }
 
     @Override
-    public void insertSorted(ITupleReference tuple, MultiComparator cmp) throws HyracksDataException {
+    public void insert(ITupleReference tuple, int tupleIndex) {
         int freeSpace = buf.getInt(freeSpaceOff);
-        slotManager.insertSlot(-1, freeSpace);
-        int bytesWritten = tupleWriter.writeTuple(tuple, buf, freeSpace);
+        slotManager.insertSlot(tupleIndex, freeSpace);        
+        int bytesWritten = tupleWriter.writeTuple(tuple, buf.array(), freeSpace);
         buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
         buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
         buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
     }
 
     @Override
-    public int split(ITreeIndexFrame rightFrame, ITupleReference tuple, MultiComparator cmp, ISplitKey splitKey)
-            throws Exception {
+    public void insertSorted(ITupleReference tuple) {
+        insert(tuple, slotManager.getGreatestKeyIndicator());
+    }
 
-        frameTuple.setFieldCount(cmp.getFieldCount());
-        
+    @Override
+    public void split(ITreeIndexFrame rightFrame, ITupleReference tuple, ISplitKey splitKey) throws TreeIndexException {
         ByteBuffer right = rightFrame.getBuffer();
-        int tupleCount = getTupleCount();
-
+        int tupleCount = getTupleCount();        
+        
+        // Find the split point, and determine into which frame the new tuple should be inserted.
         int tuplesToLeft;
         int mid = tupleCount / 2;
         ITreeIndexFrame targetFrame = null;
@@ -130,10 +135,10 @@
         }
         int tuplesToRight = tupleCount - tuplesToLeft;
 
-        // copy entire page
+        // Copy entire page.
         System.arraycopy(buf.array(), 0, right.array(), 0, buf.capacity());
 
-        // on right page we need to copy rightmost slots to left
+        // On the right page we need to copy rightmost slots to the left.
         int src = rightFrame.getSlotManager().getSlotEndOff();
         int dest = rightFrame.getSlotManager().getSlotEndOff() + tuplesToLeft
                 * rightFrame.getSlotManager().getSlotSize();
@@ -141,27 +146,24 @@
         System.arraycopy(right.array(), src, right.array(), dest, length);
         right.putInt(tupleCountOff, tuplesToRight);
 
-        // on left page only change the tupleCount indicator
+        // On left page only change the tupleCount indicator.
         buf.putInt(tupleCountOff, tuplesToLeft);
 
-        // compact both pages
-        rightFrame.compact(cmp);
-        compact(cmp);
+        // Compact both pages.
+        rightFrame.compact();
+        compact();
 
-        // insert last key
-        int targetTupleIndex = targetFrame.findTupleIndex(tuple, cmp);
-        targetFrame.insert(tuple, cmp, targetTupleIndex);
+        // Insert the new tuple.
+        int targetTupleIndex = ((BTreeNSMLeafFrame) targetFrame).findInsertTupleIndex(tuple);
+        targetFrame.insert(tuple, targetTupleIndex);
 
-        // set split key to be highest value in left page
+        // Set the split key to be the highest key in the left page.
         tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
         frameTuple.resetByTupleOffset(buf, tupleOff);
-
         int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
         splitKey.initData(splitKeySize);
         tupleWriter.writeTupleFields(frameTuple, 0, cmp.getKeyFieldCount(), splitKey.getBuffer(), 0);
         splitKey.getTuple().resetByTupleOffset(splitKey.getBuffer(), 0);
-        
-        return 0;
     }
 
     @Override
@@ -181,8 +183,27 @@
         return slotManager.findTupleIndex(searchKey, pageTuple, cmp, ftm, ftp);
     }
 
+    @Override
+    public int getPageHeaderSize() {
+        return nextLeafOff;
+    }
+
+    @Override
+    public boolean getSmFlag() {
+        return buf.get(smFlagOff) != 0;
+    }
+
+    @Override
+    public void setSmFlag(boolean smFlag) {
+        if (smFlag) {
+            buf.put(smFlagOff, (byte) 1);
+        } else {
+            buf.put(smFlagOff, (byte) 0);
+        }
+    }
+    
 	@Override
-	public int getPageHeaderSize() {
-		return nextLeafOff;
+	public void setMultiComparator(MultiComparator cmp) {
+		this.cmp = cmp;
 	}
 }
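The leaf frame now splits every mutation into two steps: a find*TupleIndex() call that validates the key (throwing BTreeDuplicateKeyException or BTreeNonExistentKeyException) and returns the target slot, followed by the actual insert/update/delete at that index. The sketch below is a simplified, self-contained analogy of that protocol, with an ArrayList standing in for the slotted page (LeafSketch and its members are hypothetical names, and a plain runtime exception replaces the BTree exceptions):

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    // Two-step protocol: findInsertTupleIndex() validates and locates,
    // insert() then performs the write at the returned index.
    public class LeafSketch {
        private final List<Integer> keys = new ArrayList<>();

        int findInsertTupleIndex(int key) {
            int idx = Collections.binarySearch(keys, key);
            if (idx >= 0) {
                throw new IllegalStateException("duplicate key: " + key);
            }
            return -(idx + 1); // insertion point that keeps the keys sorted
        }

        void insert(int key, int tupleIndex) {
            keys.add(tupleIndex, key);
        }

        public static void main(String[] args) {
            LeafSketch leaf = new LeafSketch();
            for (int k : new int[] { 5, 1, 3 }) {
                leaf.insert(k, leaf.findInsertTupleIndex(k));
            }
            System.out.println(leaf.keys); // [1, 3, 5]
        }
    }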
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrameFactory.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrameFactory.java
index d59b391..9508df5 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrameFactory.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrameFactory.java
@@ -22,8 +22,8 @@
 public class BTreeNSMLeafFrameFactory implements ITreeIndexFrameFactory {
 
     private static final long serialVersionUID = 1L;
-    private ITreeIndexTupleWriterFactory tupleWriterFactory;
-
+    private final ITreeIndexTupleWriterFactory tupleWriterFactory;
+    
     public BTreeNSMLeafFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory) {
         this.tupleWriterFactory = tupleWriterFactory;
     }
@@ -32,4 +32,9 @@
     public IBTreeLeafFrame createFrame() {
         return new BTreeNSMLeafFrame(tupleWriterFactory.createTupleWriter());
     }
+
+    @Override
+    public ITreeIndexTupleWriterFactory getTupleWriterFactory() {
+        return tupleWriterFactory;
+    }
 }
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/OrderedSlotManager.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/OrderedSlotManager.java
index 89a70e0..5f507f5 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/OrderedSlotManager.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/OrderedSlotManager.java
@@ -15,12 +15,7 @@
 
 package edu.uci.ics.hyracks.storage.am.btree.frames;
 
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
-
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
 import edu.uci.ics.hyracks.storage.am.common.frames.AbstractSlotManager;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
@@ -32,8 +27,9 @@
 	@Override
     public int findTupleIndex(ITupleReference searchKey, ITreeIndexTupleReference frameTuple, MultiComparator multiCmp,
             FindTupleMode mode, FindTupleNoExactMatchPolicy matchPolicy) {
-        if (frame.getTupleCount() <= 0)
-            return -1;
+        if (frame.getTupleCount() <= 0) {
+            return GREATEST_KEY_INDICATOR;
+        }
 
         int mid;
         int begin = 0;
@@ -49,43 +45,53 @@
             } else if (cmp > 0) {
                 begin = mid + 1;
             } else {
-                if (mode == FindTupleMode.FTM_EXCLUSIVE) {
-                    if (matchPolicy == FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY)
+                if (mode == FindTupleMode.EXCLUSIVE) {
+                    if (matchPolicy == FindTupleNoExactMatchPolicy.HIGHER_KEY) {
                         begin = mid + 1;
-                    else
+                    } else {
                         end = mid - 1;
+                    }
                 } else {
-                    return mid;
+                    if (mode == FindTupleMode.EXCLUSIVE_ERROR_IF_EXISTS) {
+                        return ERROR_INDICATOR;
+                    } else {
+                        return mid;
+                    }
                 }
             }
         }
 
-        if (mode == FindTupleMode.FTM_EXACT)
-            return -1;
+        if (mode == FindTupleMode.EXACT) {
+            return ERROR_INDICATOR;
+        }
 
-        if (matchPolicy == FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY) {
-            if (begin > frame.getTupleCount() - 1)
-                return -1;
+        if (matchPolicy == FindTupleNoExactMatchPolicy.HIGHER_KEY) {
+            if (begin > frame.getTupleCount() - 1) {
+                return GREATEST_KEY_INDICATOR;
+            }
             frameTuple.resetByTupleIndex(frame, begin);
-            if (multiCmp.compare(searchKey, frameTuple) < 0)
+            if (multiCmp.compare(searchKey, frameTuple) < 0) {
                 return begin;
-            else
-                return -1;
+            } else {
+                return GREATEST_KEY_INDICATOR;
+            }
         } else {
-            if (end < 0)
-                return -1;
+            if (end < 0) {
+                return GREATEST_KEY_INDICATOR;
+            }
             frameTuple.resetByTupleIndex(frame, end);
-            if (multiCmp.compare(searchKey, frameTuple) > 0)
+            if (multiCmp.compare(searchKey, frameTuple) > 0) {
                 return end;
-            else
-                return -1;
+            } else {
+                return GREATEST_KEY_INDICATOR;
+            }
         }
     }
     
     @Override
     public int insertSlot(int tupleIndex, int tupleOff) {
         int slotOff = getSlotOff(tupleIndex);
-        if (tupleIndex < 0) {
+        if (tupleIndex == GREATEST_KEY_INDICATOR) {
             slotOff = getSlotEndOff() - slotSize;
             setSlot(slotOff, tupleOff);
             return slotOff;
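The slot-manager change above replaces the old, overloaded -1 return value with two named sentinels: one meaning "the search key is greater than every key on the page" and one meaning "the caller should treat this as an error" (a duplicate key on insert, or a missing key for an exact-match update/delete). The sketch below mimics that scheme over a sorted int array; the constant values and the FindModeSketch/Mode names are illustrative only, and duplicate keys and the LOWER_KEY policy are ignored for brevity:

    import java.util.Arrays;

    // Distinct sentinels instead of an overloaded -1.
    public class FindModeSketch {
        static final int GREATEST_KEY_INDICATOR = -1;
        static final int ERROR_INDICATOR = -2;

        enum Mode { EXACT, INCLUSIVE, EXCLUSIVE_ERROR_IF_EXISTS }

        static int find(int[] sortedKeys, int key, Mode mode) {
            int pos = Arrays.binarySearch(sortedKeys, key);
            boolean exact = pos >= 0;
            if (mode == Mode.EXACT) {
                return exact ? pos : ERROR_INDICATOR; // e.g. update/delete of a missing key
            }
            if (mode == Mode.EXCLUSIVE_ERROR_IF_EXISTS && exact) {
                return ERROR_INDICATOR;               // e.g. duplicate key on insert
            }
            int idx = exact ? pos : -(pos + 1);       // first slot with a key >= search key
            return idx < sortedKeys.length ? idx : GREATEST_KEY_INDICATOR;
        }

        public static void main(String[] args) {
            int[] keys = { 10, 20, 30 };
            System.out.println(find(keys, 20, Mode.EXCLUSIVE_ERROR_IF_EXISTS)); // -2 (duplicate)
            System.out.println(find(keys, 25, Mode.INCLUSIVE));                 // 2  (slot of key 30)
            System.out.println(find(keys, 40, Mode.INCLUSIVE));                 // -1 (greater than all keys)
            System.out.println(find(keys, 25, Mode.EXACT));                     // -2 (no exact match)
        }
    }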
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java
index 1869a1b..3952f5d 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java
@@ -22,11 +22,15 @@
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeFrame;
 import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
 import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeException;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeNotUpdateableException;
 import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrame;
 import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
 import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexOpContext;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
@@ -38,8 +42,8 @@
 import edu.uci.ics.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
 import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOpContext;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexUtils;
 import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
 import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
 import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
@@ -48,207 +52,95 @@
 
     public static final float DEFAULT_FILL_FACTOR = 0.7f;
 
-    private final static int RESTART_OP = Integer.MIN_VALUE;
+    private final static long RESTART_OP = Long.MIN_VALUE;
     private final static int MAX_RESTARTS = 10;
-
-    // the root page never changes
-    private final int rootPage = 1;
+    private final static int rootPage = 1;
+        
+    private boolean created = false;
 
     private final IFreePageManager freePageManager;
-
-    private boolean created = false;
-    private boolean loaded = false;
-
-    private final IBufferCache bufferCache;
-    private int fileId;
+    private final IBufferCache bufferCache;    
     private final ITreeIndexFrameFactory interiorFrameFactory;
     private final ITreeIndexFrameFactory leafFrameFactory;
+    private final int fieldCount;
     private final MultiComparator cmp;
     private final ReadWriteLock treeLatch;
     private final RangePredicate diskOrderScanPredicate;
+    private int fileId;
 
-    public int rootSplits = 0;
-    public int[] splitsByLevel = new int[500];
-    public long readLatchesAcquired = 0;
-    public long readLatchesReleased = 0;
-    public long writeLatchesAcquired = 0;
-    public long writeLatchesReleased = 0;
-    public long pins = 0;
-    public long unpins = 0;
-
-    public long treeLatchesAcquired = 0;
-    public long treeLatchesReleased = 0;
-
-    public byte currentLevel = 0;
-
-    public int usefulCompression = 0;
-    public int uselessCompression = 0;
-
-    public void treeLatchStatus() {
-        System.out.println(treeLatch.writeLock().toString());
-    }
-
-    public String printStats() {
-        StringBuilder strBuilder = new StringBuilder();
-        strBuilder.append("\n");
-        strBuilder.append("ROOTSPLITS: " + rootSplits + "\n");
-        strBuilder.append("SPLITS BY LEVEL\n");
-        for (int i = 0; i < currentLevel; i++) {
-            strBuilder.append(String.format("%3d ", i) + String.format("%8d ", splitsByLevel[i]) + "\n");
-        }
-        strBuilder.append(String.format("READ LATCHES:  %10d %10d\n", readLatchesAcquired, readLatchesReleased));
-        strBuilder.append(String.format("WRITE LATCHES: %10d %10d\n", writeLatchesAcquired, writeLatchesReleased));
-        strBuilder.append(String.format("PINS:          %10d %10d\n", pins, unpins));
-        return strBuilder.toString();
-    }
-
-    public BTree(IBufferCache bufferCache, IFreePageManager freePageManager,
-            ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory leafFrameFactory, MultiComparator cmp) {
+    public BTree(IBufferCache bufferCache, int fieldCount, MultiComparator cmp, IFreePageManager freePageManager,
+            ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory leafFrameFactory) {
         this.bufferCache = bufferCache;
-        this.interiorFrameFactory = interiorFrameFactory;
-        this.leafFrameFactory = leafFrameFactory;
+        this.fieldCount = fieldCount;
         this.cmp = cmp;
+        this.interiorFrameFactory = interiorFrameFactory;
+        this.leafFrameFactory = leafFrameFactory;        
         this.freePageManager = freePageManager;
         this.treeLatch = new ReentrantReadWriteLock(true);
         this.diskOrderScanPredicate = new RangePredicate(true, null, null, true, true, cmp, cmp);
     }
 
     @Override
-    public void create(int fileId, ITreeIndexFrame leafFrame, ITreeIndexMetaDataFrame metaFrame) throws Exception {
-
-        if (created)
-            return;
-
+    public void create(int fileId) throws HyracksDataException {
         treeLatch.writeLock().lock();
         try {
-
-            // check if another thread beat us to it
-            if (created)
+            if (created) {
                 return;
-
-            freePageManager.init(metaFrame, rootPage);
-
-            // initialize root page
-            ICachedPage rootNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rootPage), true);
-            pins++;
-
-            rootNode.acquireWriteLatch();
-            writeLatchesAcquired++;
-            try {
-                leafFrame.setPage(rootNode);
-                leafFrame.initBuffer((byte) 0);
-            } finally {
-                rootNode.releaseWriteLatch();
-                writeLatchesReleased++;
-                bufferCache.unpin(rootNode);
-                unpins++;
             }
-            currentLevel = 0;
-
+            ITreeIndexFrame leafFrame = leafFrameFactory.createFrame();
+            ITreeIndexMetaDataFrame metaFrame = freePageManager.getMetaDataFrameFactory().createFrame();
+            this.fileId = fileId;
+            freePageManager.init(metaFrame, rootPage);
+            initRoot(leafFrame, true);
             created = true;
         } finally {
             treeLatch.writeLock().unlock();
         }
     }
 
+    @Override
     public void open(int fileId) {
-        this.fileId = fileId;
+    	this.fileId = fileId;
     }
 
+    @Override
     public void close() {
         fileId = -1;
     }
 
-    private void addFreePages(BTreeOpContext ctx) throws Exception {
+    private void addFreePages(BTreeOpContext ctx) throws HyracksDataException {
         for (int i = 0; i < ctx.freePages.size(); i++) {
-            // root page is special, don't add it to free pages
+            // Root page is special, never add it to free pages.
             if (ctx.freePages.get(i) != rootPage) {
                 freePageManager.addFreePage(ctx.metaFrame, ctx.freePages.get(i));
             }
         }
         ctx.freePages.clear();
     }
-
-    public void printTree(IBTreeLeafFrame leafFrame, IBTreeInteriorFrame interiorFrame, ISerializerDeserializer[] fields)
-            throws Exception {
-        printTree(rootPage, null, false, leafFrame, interiorFrame, fields);
-    }
-
-    public void printTree(int pageId, ICachedPage parent, boolean unpin, IBTreeLeafFrame leafFrame,
-            IBTreeInteriorFrame interiorFrame, ISerializerDeserializer[] fields) throws Exception {
-
-        ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-        pins++;
-        node.acquireReadLatch();
-        readLatchesAcquired++;
-
-        try {
-            if (parent != null && unpin == true) {
-                parent.releaseReadLatch();
-                readLatchesReleased++;
-
-                bufferCache.unpin(parent);
-                unpins++;
-            }
-
-            interiorFrame.setPage(node);
-            int level = interiorFrame.getLevel();
-
-            System.out.format("%1d ", level);
-            System.out.format("%3d ", pageId);
-            for (int i = 0; i < currentLevel - level; i++)
-                System.out.format("    ");
-
-            String keyString;
-            if (interiorFrame.isLeaf()) {
-                leafFrame.setPage(node);
-                keyString = leafFrame.printKeys(cmp, fields);
-            } else {
-                keyString = interiorFrame.printKeys(cmp, fields);
-            }
-
-            System.out.format(keyString);
-            if (!interiorFrame.isLeaf()) {
-                ArrayList<Integer> children = ((BTreeNSMInteriorFrame) (interiorFrame)).getChildren(cmp);
-
-                for (int i = 0; i < children.size(); i++) {
-                    printTree(children.get(i), node, i == children.size() - 1, leafFrame, interiorFrame, fields);
-                }
-            } else {
-                node.releaseReadLatch();
-                readLatchesReleased++;
-
-                bufferCache.unpin(node);
-                unpins++;
-            }
-        } catch (Exception e) {
-            node.releaseReadLatch();
-            readLatchesReleased++;
-
-            bufferCache.unpin(node);
-            unpins++;
-            e.printStackTrace();
-        }
-    }
-
+    
     @Override
-    public void diskOrderScan(ITreeIndexCursor icursor, ITreeIndexFrame leafFrame, ITreeIndexMetaDataFrame metaFrame,
-            IndexOpContext ictx) throws HyracksDataException {
+    public void diskOrderScan(ITreeIndexCursor icursor, IIndexOpContext ictx) throws HyracksDataException {
         TreeDiskOrderScanCursor cursor = (TreeDiskOrderScanCursor) icursor;
         BTreeOpContext ctx = (BTreeOpContext) ictx;
         ctx.reset();
 
-        int currentPageId = rootPage + 1;
-        int maxPageId = freePageManager.getMaxPage(metaFrame);
+        int currentPageId = rootPage;
+        int maxPageId = freePageManager.getMaxPage(ctx.metaFrame);
 
         ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, currentPageId), false);
         page.acquireReadLatch();
-        cursor.setBufferCache(bufferCache);
-        cursor.setFileId(fileId);
-        cursor.setCurrentPageId(currentPageId);
-        cursor.setMaxPageId(maxPageId);
-        ctx.cursorInitialState.setPage(page);
-        cursor.open(ctx.cursorInitialState, diskOrderScanPredicate);
+        try {
+            cursor.setBufferCache(bufferCache);
+            cursor.setFileId(fileId);
+            cursor.setCurrentPageId(currentPageId);
+            cursor.setMaxPageId(maxPageId);
+            ctx.cursorInitialState.setPage(page);
+            cursor.open(ctx.cursorInitialState, diskOrderScanPredicate);
+        } catch (Exception e) {
+            page.releaseReadLatch();
+            bufferCache.unpin(page);
+            throw new HyracksDataException(e);
+        }
     }
 
     public void search(ITreeIndexCursor cursor, RangePredicate pred, BTreeOpContext ctx) throws Exception {
@@ -256,27 +148,25 @@
         ctx.pred = pred;
         ctx.cursor = cursor;
         // simple index scan
-        if (ctx.pred.getLowKeyComparator() == null)
+        if (ctx.pred.getLowKeyComparator() == null) {
             ctx.pred.setLowKeyComparator(cmp);
-        if (ctx.pred.getHighKeyComparator() == null)
+        }
+        if (ctx.pred.getHighKeyComparator() == null) {
             ctx.pred.setHighKeyComparator(cmp);
-
-        boolean repeatOp = true;
+        }
         // we use this loop to deal with possibly multiple operation restarts
         // due to ongoing structure modifications during the descent
+        boolean repeatOp = true;
         while (repeatOp && ctx.opRestarts < MAX_RESTARTS) {
             performOp(rootPage, null, ctx);
-
             // if we reach this stage then we need to restart from the (possibly
             // new) root
             if (!ctx.pageLsns.isEmpty() && ctx.pageLsns.getLast() == RESTART_OP) {
                 ctx.pageLsns.removeLast(); // pop the restart op indicator
                 continue;
             }
-
             repeatOp = false;
         }
-
         cursor.setBufferCache(bufferCache);
         cursor.setFileId(fileId);
     }
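Both search() above and insertUpdateOrDelete() below share the same restart protocol: the descent pushes the RESTART_OP marker onto ctx.pageLsns when it runs into an in-flight structure modification, and the caller pops the marker and retries from the root, up to MAX_RESTARTS times. A stripped-down, self-contained sketch of that loop shape follows (RestartLoopSketch and its fake performOp() are stand-ins, not BTree code):

    import java.util.ArrayDeque;
    import java.util.Deque;

    // Bounded retry loop driven by a sentinel value at the end of the LSN trail.
    public class RestartLoopSketch {
        static final long RESTART_OP = Long.MIN_VALUE;
        static final int MAX_RESTARTS = 10;

        static int attempts = 0;

        // Stand-in for performOp(): here it asks for one restart, then succeeds.
        static Deque<Long> performOp() {
            Deque<Long> pageLsns = new ArrayDeque<>();
            pageLsns.addLast(attempts++ == 0 ? RESTART_OP : 7L);
            return pageLsns;
        }

        public static void main(String[] args) {
            int opRestarts = 0;
            boolean repeatOp = true;
            while (repeatOp && opRestarts < MAX_RESTARTS) {
                Deque<Long> pageLsns = performOp();
                if (!pageLsns.isEmpty() && pageLsns.peekLast() == RESTART_OP) {
                    pageLsns.removeLast(); // pop the restart indicator and retry from the root
                    opRestarts++;
                    continue;
                }
                repeatOp = false;
            }
            System.out.println("completed after " + opRestarts + " restart(s)"); // 1 restart
        }
    }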
@@ -286,98 +176,81 @@
         for (int i = 0; i < ctx.smPages.size(); i++) {
             int pageId = ctx.smPages.get(i);
             ICachedPage smPage = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-            pins++;
-            smPage.acquireWriteLatch(); // TODO: would like to set page dirty
-            // without latching
-            writeLatchesAcquired++;
+            smPage.acquireWriteLatch();
             try {
                 ctx.interiorFrame.setPage(smPage);
                 ctx.interiorFrame.setSmFlag(false);
             } finally {
                 smPage.releaseWriteLatch();
-                writeLatchesReleased++;
                 bufferCache.unpin(smPage);
-                unpins++;
             }
         }
         if (ctx.smPages.size() > 0) {
             treeLatch.writeLock().unlock();
-            treeLatchesReleased++;
             ctx.smPages.clear();
         }
         ctx.interiorFrame.setPage(originalPage);
     }
 
-    private void createNewRoot(BTreeOpContext ctx) throws Exception {
-        rootSplits++; // debug
-        splitsByLevel[currentLevel]++;
-        currentLevel++;
-
-        // make sure the root is always at the same level
+    private void initRoot(ITreeIndexFrame leafFrame, boolean firstInit) throws HyracksDataException {
+        ICachedPage rootNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rootPage), firstInit);
+        rootNode.acquireWriteLatch();
+        try {
+            leafFrame.setPage(rootNode);
+            leafFrame.initBuffer((byte) 0);
+        } finally {
+            rootNode.releaseWriteLatch();
+            bufferCache.unpin(rootNode);
+        }
+    }
+    
+    private void createNewRoot(BTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
+        // Make sure the root is always in the same page (the root page id never changes).
         ICachedPage leftNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, ctx.splitKey.getLeftPage()),
                 false);
-        pins++;
-        leftNode.acquireWriteLatch(); // TODO: think about whether latching is
-        // really required
-        writeLatchesAcquired++;
+        leftNode.acquireWriteLatch();
         try {
             ICachedPage rightNode = bufferCache.pin(
                     BufferedFileHandle.getDiskPageId(fileId, ctx.splitKey.getRightPage()), false);
-            pins++;
-            rightNode.acquireWriteLatch(); // TODO: think about whether latching
-            // is really required
-            writeLatchesAcquired++;
+            rightNode.acquireWriteLatch();
             try {
                 int newLeftId = freePageManager.getFreePage(ctx.metaFrame);
                 ICachedPage newLeftNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, newLeftId), true);
-                pins++;
-                newLeftNode.acquireWriteLatch(); // TODO: think about whether
-                // latching is really
-                // required
-                writeLatchesAcquired++;
+                newLeftNode.acquireWriteLatch();
                 try {
-                    // copy left child to new left child
+                    // Copy left child to new left child.
                     System.arraycopy(leftNode.getBuffer().array(), 0, newLeftNode.getBuffer().array(), 0, newLeftNode
                             .getBuffer().capacity());
                     ctx.interiorFrame.setPage(newLeftNode);
                     ctx.interiorFrame.setSmFlag(false);
-
-                    // change sibling pointer if children are leaves
+                    // Change sibling pointer if children are leaves.
                     ctx.leafFrame.setPage(rightNode);
                     if (ctx.leafFrame.isLeaf()) {
                         ctx.leafFrame.setPrevLeaf(newLeftId);
                     }
-
-                    // initialize new root (leftNode becomes new root)
+                    // Initialize new root (leftNode becomes new root).
                     ctx.interiorFrame.setPage(leftNode);
                     ctx.interiorFrame.initBuffer((byte) (ctx.leafFrame.getLevel() + 1));
-                    ctx.interiorFrame.setSmFlag(true); // will be cleared later
-                    // in unsetSmPages
+                    // Will be cleared later in unsetSmPages.
+                    ctx.interiorFrame.setSmFlag(true);
                     ctx.splitKey.setLeftPage(newLeftId);
-                    int targetTupleIndex = ctx.interiorFrame.findTupleIndex(ctx.splitKey.getTuple(), cmp);
-                    ctx.interiorFrame.insert(ctx.splitKey.getTuple(), cmp, targetTupleIndex);
+                    int targetTupleIndex = ctx.interiorFrame.findInsertTupleIndex(ctx.splitKey.getTuple());
+                    ctx.interiorFrame.insert(ctx.splitKey.getTuple(), targetTupleIndex);
                 } finally {
                     newLeftNode.releaseWriteLatch();
-                    writeLatchesReleased++;
                     bufferCache.unpin(newLeftNode);
-                    unpins++;
                 }
             } finally {
                 rightNode.releaseWriteLatch();
-                writeLatchesReleased++;
                 bufferCache.unpin(rightNode);
-                unpins++;
             }
         } finally {
             leftNode.releaseWriteLatch();
-            writeLatchesReleased++;
             bufferCache.unpin(leftNode);
-            unpins++;
         }
     }
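createNewRoot() above keeps the root at its fixed page id: the old root's contents are copied into a freshly allocated page, and the root page itself is re-initialized one level higher, pointing at the copied page and at the right half of the split. A tiny, self-contained analogy (FixedRootSketch and the page-id map are invented; real pages are byte buffers, not lists):

    import java.util.ArrayList;
    import java.util.List;
    import java.util.TreeMap;

    // The root keeps page id 1; its old contents move to a new page on a root split.
    public class FixedRootSketch {
        static final int ROOT_PAGE = 1;

        public static void main(String[] args) {
            TreeMap<Integer, List<Integer>> pages = new TreeMap<>(); // pageId -> child page ids
            pages.put(ROOT_PAGE, new ArrayList<>(List.of(7, 8)));    // old root
            int rightSplitPage = 9;                                  // produced by the split
            int newLeftId = 10;                                      // from the free page manager

            // Copy the old root into the new left page...
            pages.put(newLeftId, new ArrayList<>(pages.get(ROOT_PAGE)));
            // ...and re-initialize page 1 as the new root, one level higher.
            pages.put(ROOT_PAGE, new ArrayList<>(List.of(newLeftId, rightSplitPage)));

            System.out.println(pages); // {1=[10, 9], 10=[7, 8]}
        }
    }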
-
-    @Override
-    public void insert(ITupleReference tuple, IndexOpContext ictx) throws Exception {
+    
+    private void insertUpdateOrDelete(ITupleReference tuple, IIndexOpContext ictx) throws HyracksDataException, TreeIndexException {
         BTreeOpContext ctx = (BTreeOpContext) ictx;
         ctx.reset();
         ctx.pred.setLowKeyComparator(cmp);
@@ -386,402 +259,344 @@
         ctx.pred.setHighKey(tuple, true);
         ctx.splitKey.reset();
         ctx.splitKey.getTuple().setFieldCount(cmp.getKeyFieldCount());
-
+        // We use this loop to deal with possibly multiple operation restarts
+        // due to ongoing structure modifications during the descent.
         boolean repeatOp = true;
-        // we use this loop to deal with possibly multiple operation restarts
-        // due to ongoing structure modifications during the descent
         while (repeatOp && ctx.opRestarts < MAX_RESTARTS) {
             performOp(rootPage, null, ctx);
-
-            // if we reach this stage then we need to restart from the (possibly
-            // new) root
+            // Do we need to restart from the (possibly new) root?
             if (!ctx.pageLsns.isEmpty() && ctx.pageLsns.getLast() == RESTART_OP) {
                 ctx.pageLsns.removeLast(); // pop the restart op indicator
                 continue;
             }
-
-            // we split the root, here is the key for a new root
+            // Split key propagated?
             if (ctx.splitKey.getBuffer() != null) {
-                createNewRoot(ctx);
+                if (ctx.op == IndexOp.DELETE) {
+                    // Reset level of root to zero.
+                    initRoot(ctx.leafFrame, false);
+                } else {
+                    // Insert or update op. Create a new root.
+                    createNewRoot(ctx);
+                }
             }
-
             unsetSmPages(ctx);
-
+            if (ctx.op == IndexOp.DELETE) {
+                addFreePages(ctx);
+            }
             repeatOp = false;
         }
     }
+    
+    @Override
+    public void insert(ITupleReference tuple, IIndexOpContext ictx) throws HyracksDataException, TreeIndexException {
+        insertUpdateOrDelete(tuple, ictx);
+    }
 
-    public long uselessCompressionTime = 0;
-
+    @Override
+    public void update(ITupleReference tuple, IIndexOpContext ictx) throws HyracksDataException, TreeIndexException {
+        // This call only allows updating of non-key fields.
+        // Updating a tuple's key necessitates deleting the old entry and inserting the new one.
+        // The user of the BTree is responsible for dealing with key updates (i.e., doing a delete + insert).
+        if (fieldCount == cmp.getKeyFieldCount()) {
+            throw new BTreeNotUpdateableException("Cannot perform updates when the entire tuple forms the key.");
+        }
+        insertUpdateOrDelete(tuple, ictx);
+    }
+    
+    @Override
+    public void delete(ITupleReference tuple, IIndexOpContext ictx) throws HyracksDataException, TreeIndexException {
+        insertUpdateOrDelete(tuple, ictx);
+    }
+    
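As the comment in update() notes, only non-key fields may be updated in place; changing the key itself must be expressed by the caller as a delete of the old entry followed by an insert of the new one, since the entry's position in the sorted order depends on the key. A trivial analogy using a TreeMap (KeyUpdateSketch is a hypothetical illustration, not BTree code):

    import java.util.TreeMap;

    // Non-key fields can be overwritten in place; a key change is delete + insert.
    public class KeyUpdateSketch {
        public static void main(String[] args) {
            TreeMap<Integer, String> index = new TreeMap<>();
            index.put(42, "v1");

            // Non-key update: overwrite the payload under the same key.
            index.put(42, "v2");

            // Key update: remove the old entry, then insert under the new key.
            String payload = index.remove(42);
            index.put(43, payload);

            System.out.println(index); // {43=v2}
        }
    }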
     private void insertLeaf(ICachedPage node, int pageId, ITupleReference tuple, BTreeOpContext ctx) throws Exception {
         ctx.leafFrame.setPage(node);
-        ctx.leafFrame.setPageTupleFieldCount(cmp.getFieldCount());
-
-        int targetTupleIndex = ctx.leafFrame.findTupleIndex(tuple, cmp);
-        FrameOpSpaceStatus spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple, cmp);
+        int targetTupleIndex = ctx.leafFrame.findInsertTupleIndex(tuple);
+        FrameOpSpaceStatus spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple);
         switch (spaceStatus) {
-
             case SUFFICIENT_CONTIGUOUS_SPACE: {
-                // System.out.println("SUFFICIENT_CONTIGUOUS_SPACE");
-                ctx.leafFrame.insert(tuple, cmp, targetTupleIndex);
+                ctx.leafFrame.insert(tuple, targetTupleIndex);
                 ctx.splitKey.reset();
-            }
                 break;
-
+            }
             case SUFFICIENT_SPACE: {
-                // System.out.println("SUFFICIENT_SPACE");
-                boolean slotsChanged = ctx.leafFrame.compact(cmp);
-                if (slotsChanged)
-                    targetTupleIndex = ctx.leafFrame.findTupleIndex(tuple, cmp);
-                ctx.leafFrame.insert(tuple, cmp, targetTupleIndex);
+                boolean slotsChanged = ctx.leafFrame.compact();
+                if (slotsChanged) {
+                    targetTupleIndex = ctx.leafFrame.findInsertTupleIndex(tuple);
+                }
+                ctx.leafFrame.insert(tuple, targetTupleIndex);
                 ctx.splitKey.reset();
-            }
                 break;
-
+            }
             case INSUFFICIENT_SPACE: {
-                // System.out.println("INSUFFICIENT_SPACE");
-
-                // try compressing the page first and see if there is space
-                // available
-                long start = System.currentTimeMillis();
-                boolean reCompressed = ctx.leafFrame.compress(cmp);
-                long end = System.currentTimeMillis();
-                if (reCompressed)
-                    spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple, cmp);
-
+                // Try compressing the page first and see if there is space available.
+                boolean reCompressed = ctx.leafFrame.compress();
+                if (reCompressed) {
+                    // Compression could have changed the target tuple index; find it again.
+                    targetTupleIndex = ctx.leafFrame.findInsertTupleIndex(tuple);
+                    spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple);
+                }
                 if (spaceStatus == FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE) {
-                    ctx.leafFrame.insert(tuple, cmp, targetTupleIndex);
+                    ctx.leafFrame.insert(tuple, targetTupleIndex);
                     ctx.splitKey.reset();
-
-                    usefulCompression++;
                 } else {
+                    performLeafSplit(pageId, tuple, ctx);
+                }
+                break;
+            }
+        }
+        node.releaseWriteLatch();
+        bufferCache.unpin(node);
+    }
+    
+    private void performLeafSplit(int pageId, ITupleReference tuple, BTreeOpContext ctx) throws Exception {
+        int rightSiblingPageId = ctx.leafFrame.getNextLeaf();
+        ICachedPage rightSibling = null;
+        if (rightSiblingPageId > 0) {
+            rightSibling = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rightSiblingPageId),
+                    false);
+        }
+        // The lock is released in unsetSmPages(), after the structure modification (SM) has fully completed.
+        treeLatch.writeLock().lock(); 
+        try {
+            int rightPageId = freePageManager.getFreePage(ctx.metaFrame);
+            ICachedPage rightNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rightPageId),
+                    true);
+            rightNode.acquireWriteLatch();
+            try {
+                IBTreeLeafFrame rightFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
+                rightFrame.setPage(rightNode);
+                rightFrame.initBuffer((byte) 0);
+                rightFrame.setMultiComparator(cmp);
+                ctx.leafFrame.split(rightFrame, tuple, ctx.splitKey);
 
-                    uselessCompressionTime += (end - start);
-                    uselessCompression++;
+                ctx.smPages.add(pageId);
+                ctx.smPages.add(rightPageId);
+                ctx.leafFrame.setSmFlag(true);
+                rightFrame.setSmFlag(true);
 
-                    // perform split
-                    splitsByLevel[0]++; // debug
-                    int rightSiblingPageId = ctx.leafFrame.getNextLeaf();
-                    ICachedPage rightSibling = null;
-                    if (rightSiblingPageId > 0) {
-                        rightSibling = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rightSiblingPageId),
-                                false);
-                        pins++;
-                    }
+                rightFrame.setNextLeaf(ctx.leafFrame.getNextLeaf());
+                rightFrame.setPrevLeaf(pageId);
+                ctx.leafFrame.setNextLeaf(rightPageId);
 
-                    treeLatch.writeLock().lock(); // lock is released in
-                    // unsetSmPages(), after sm has
-                    // fully completed
-                    treeLatchesAcquired++;
+                // TODO: We just use increasing numbers as pageLsn; we should tie this
+                // together with the LogManager and TransactionManager.
+                rightFrame.setPageLsn(rightFrame.getPageLsn() + 1);
+                ctx.leafFrame.setPageLsn(ctx.leafFrame.getPageLsn() + 1);
+
+                ctx.splitKey.setPages(pageId, rightPageId);
+                
+                if (rightSibling != null) {
+                    rightSibling.acquireWriteLatch();
                     try {
-
-                        int rightPageId = freePageManager.getFreePage(ctx.metaFrame);
-                        ICachedPage rightNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rightPageId),
-                                true);
-                        pins++;
-                        rightNode.acquireWriteLatch();
-                        writeLatchesAcquired++;
-                        try {
-                            IBTreeLeafFrame rightFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
-                            rightFrame.setPage(rightNode);
-                            rightFrame.initBuffer((byte) 0);
-                            rightFrame.setPageTupleFieldCount(cmp.getFieldCount());
-
-                            int ret = ctx.leafFrame.split(rightFrame, tuple, cmp, ctx.splitKey);
-
-                            ctx.smPages.add(pageId);
-                            ctx.smPages.add(rightPageId);
-                            ctx.leafFrame.setSmFlag(true);
-                            rightFrame.setSmFlag(true);
-
-                            rightFrame.setNextLeaf(ctx.leafFrame.getNextLeaf());
-                            rightFrame.setPrevLeaf(pageId);
-                            ctx.leafFrame.setNextLeaf(rightPageId);
-
-                            // TODO: we just use increasing numbers as pageLsn,
-                            // we
-                            // should tie this together with the LogManager and
-                            // TransactionManager
-                            rightFrame.setPageLsn(rightFrame.getPageLsn() + 1);
-                            ctx.leafFrame.setPageLsn(ctx.leafFrame.getPageLsn() + 1);
-
-                            if (ret != 0) {
-                                ctx.splitKey.reset();
-                            } else {
-                                // System.out.print("LEAF SPLITKEY: ");
-                                // cmp.printKey(splitKey.getData(), 0);
-                                // System.out.println("");
-
-                                ctx.splitKey.setPages(pageId, rightPageId);
-                            }
-                            if (rightSibling != null) {
-                                rightSibling.acquireWriteLatch();
-                                writeLatchesAcquired++;
-                                try {
-                                    rightFrame.setPage(rightSibling); // reuse
-                                    // rightFrame
-                                    // for
-                                    // modification
-                                    rightFrame.setPrevLeaf(rightPageId);
-                                } finally {
-                                    rightSibling.releaseWriteLatch();
-                                    writeLatchesReleased++;
-                                }
-                            }
-                        } finally {
-                            rightNode.releaseWriteLatch();
-                            writeLatchesReleased++;
-                            bufferCache.unpin(rightNode);
-                            unpins++;
-                        }
-                    } catch (Exception e) {
-                        treeLatch.writeLock().unlock();
-                        treeLatchesReleased++;
-                        throw e;
+                        // Reuse rightFrame for modification.
+                        rightFrame.setPage(rightSibling);
+                        rightFrame.setPrevLeaf(rightPageId);
                     } finally {
-                        if (rightSibling != null) {
-                            bufferCache.unpin(rightSibling);
-                            unpins++;
-                        }
+                        rightSibling.releaseWriteLatch();
                     }
                 }
+            } finally {
+                rightNode.releaseWriteLatch();
+                bufferCache.unpin(rightNode);
             }
-                break;
-
+        } catch (Exception e) {
+            treeLatch.writeLock().unlock();
+            throw e;
+        } finally {
+            if (rightSibling != null) {
+                bufferCache.unpin(rightSibling);
+            }
         }
-
+    }
+    
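+    // Updates a tuple in place when possible; otherwise the old tuple is deleted and the
+    // new one re-inserted, compacting or compressing the page as needed. If even
+    // compression cannot make room, the leaf is split.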
+    private void updateLeaf(ICachedPage node, int pageId, ITupleReference tuple, BTreeOpContext ctx) throws Exception {
+        ctx.leafFrame.setPage(node);
+        int oldTupleIndex = ctx.leafFrame.findUpdateTupleIndex(tuple);
+        FrameOpSpaceStatus spaceStatus = ctx.leafFrame.hasSpaceUpdate(tuple, oldTupleIndex);
+        switch (spaceStatus) {
+            case SUFFICIENT_INPLACE_SPACE: {
+                ctx.leafFrame.update(tuple, oldTupleIndex, true);
+                ctx.splitKey.reset();
+                break;
+            }
+            case SUFFICIENT_CONTIGUOUS_SPACE: {
+                ctx.leafFrame.update(tuple, oldTupleIndex, false);
+                ctx.splitKey.reset();
+                break;
+            }                
+            case SUFFICIENT_SPACE: {
+                // Delete the old tuple, compact the frame, and insert the new tuple.
+                ctx.leafFrame.delete(tuple, oldTupleIndex);
+                ctx.leafFrame.compact();
+                int targetTupleIndex = ctx.leafFrame.findInsertTupleIndex(tuple);
+                ctx.leafFrame.insert(tuple, targetTupleIndex);
+                ctx.splitKey.reset();
+                break;
+            }                
+            case INSUFFICIENT_SPACE: {
+                // Delete the old tuple, and try compressing the page to make space available.
+                ctx.leafFrame.delete(tuple, oldTupleIndex);
+                ctx.leafFrame.compress();
+                // We need to insert the new tuple, so check if there is space.
+                spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple);                
+                if (spaceStatus == FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE) {
+                    int targetTupleIndex = ctx.leafFrame.findInsertTupleIndex(tuple);
+                    ctx.leafFrame.insert(tuple, targetTupleIndex);
+                    ctx.splitKey.reset();
+                } else {
+                    performLeafSplit(pageId, tuple, ctx);
+                }
+                break;
+            }
+        }
         node.releaseWriteLatch();
-        writeLatchesReleased++;
         bufferCache.unpin(node);
-        unpins++;
     }
 
     private void insertInterior(ICachedPage node, int pageId, ITupleReference tuple, BTreeOpContext ctx)
             throws Exception {
         ctx.interiorFrame.setPage(node);
-        ctx.interiorFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
-
-        int targetTupleIndex = ctx.interiorFrame.findTupleIndex(tuple, cmp);
-        FrameOpSpaceStatus spaceStatus = ctx.interiorFrame.hasSpaceInsert(tuple, cmp);
+        int targetTupleIndex = ctx.interiorFrame.findInsertTupleIndex(tuple);
+        FrameOpSpaceStatus spaceStatus = ctx.interiorFrame.hasSpaceInsert(tuple);
         switch (spaceStatus) {
             case INSUFFICIENT_SPACE: {
-                splitsByLevel[ctx.interiorFrame.getLevel()]++; // debug
                 int rightPageId = freePageManager.getFreePage(ctx.metaFrame);
                 ICachedPage rightNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rightPageId), true);
-                pins++;
                 rightNode.acquireWriteLatch();
-                writeLatchesAcquired++;
                 try {
-                    ITreeIndexFrame rightFrame = interiorFrameFactory.createFrame();
+                    IBTreeFrame rightFrame = (IBTreeFrame)interiorFrameFactory.createFrame();
                     rightFrame.setPage(rightNode);
                     rightFrame.initBuffer((byte) ctx.interiorFrame.getLevel());
-                    rightFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
+                    rightFrame.setMultiComparator(cmp);
                     // instead of creating a new split key, use the existing
                     // splitKey
-                    int ret = ctx.interiorFrame.split(rightFrame, ctx.splitKey.getTuple(), cmp, ctx.splitKey);
-
+                    ctx.interiorFrame.split(rightFrame, ctx.splitKey.getTuple(), ctx.splitKey);
                     ctx.smPages.add(pageId);
                     ctx.smPages.add(rightPageId);
                     ctx.interiorFrame.setSmFlag(true);
                     rightFrame.setSmFlag(true);
-
                     // TODO: we just use increasing numbers as pageLsn, we
                     // should tie this together with the LogManager and
                     // TransactionManager
                     rightFrame.setPageLsn(rightFrame.getPageLsn() + 1);
                     ctx.interiorFrame.setPageLsn(ctx.interiorFrame.getPageLsn() + 1);
 
-                    if (ret != 0) {
-                        ctx.splitKey.reset();
-                    } else {
-                        // System.out.print("INTERIOR SPLITKEY: ");
-                        // cmp.printKey(splitKey.getData(), 0);
-                        // System.out.println("");
-
-                        ctx.splitKey.setPages(pageId, rightPageId);
-                    }
+                    ctx.splitKey.setPages(pageId, rightPageId);
                 } finally {
                     rightNode.releaseWriteLatch();
-                    writeLatchesReleased++;
                     bufferCache.unpin(rightNode);
-                    unpins++;
                 }
-            }
                 break;
+            }                
 
             case SUFFICIENT_CONTIGUOUS_SPACE: {
-                // System.out.println("INSERT INTERIOR: " + pageId);
-                ctx.interiorFrame.insert(tuple, cmp, targetTupleIndex);
+                ctx.interiorFrame.insert(tuple, targetTupleIndex);
                 ctx.splitKey.reset();
-            }
                 break;
+            }
 
             case SUFFICIENT_SPACE: {
-                boolean slotsChanged = ctx.interiorFrame.compact(cmp);
-                if (slotsChanged)
-                    targetTupleIndex = ctx.interiorFrame.findTupleIndex(tuple, cmp);
-                ctx.interiorFrame.insert(tuple, cmp, targetTupleIndex);
-                ctx.splitKey.reset();
-            }
-                break;
-
-        }
-    }
-
-    @Override
-    public void delete(ITupleReference tuple, IndexOpContext ictx) throws Exception {
-        BTreeOpContext ctx = (BTreeOpContext) ictx;
-        ctx.reset();
-        ctx.pred.setLowKeyComparator(cmp);
-        ctx.pred.setHighKeyComparator(cmp);
-        ctx.pred.setLowKey(tuple, true);
-        ctx.pred.setHighKey(tuple, true);
-        ctx.splitKey.reset();
-        ctx.splitKey.getTuple().setFieldCount(cmp.getKeyFieldCount());
-
-        boolean repeatOp = true;
-        // we use this loop to deal with possibly multiple operation restarts
-        // due to ongoing structure modifications during the descent
-        while (repeatOp && ctx.opRestarts < MAX_RESTARTS) {
-            performOp(rootPage, null, ctx);
-
-            // if we reach this stage then we need to restart from the (possibly
-            // new) root
-            if (!ctx.pageLsns.isEmpty() && ctx.pageLsns.getLast() == RESTART_OP) {
-                ctx.pageLsns.removeLast(); // pop the restart op indicator
-                continue;
-            }
-
-            // tree is empty, reset level to zero
-            if (ctx.splitKey.getBuffer() != null) {
-                ICachedPage rootNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rootPage), false);
-                pins++;
-                rootNode.acquireWriteLatch();
-                writeLatchesAcquired++;
-                try {
-                    ctx.leafFrame.setPage(rootNode);
-                    ctx.leafFrame.initBuffer((byte) 0);
-                    currentLevel = 0; // debug
-                } finally {
-                    rootNode.releaseWriteLatch();
-                    writeLatchesReleased++;
-                    bufferCache.unpin(rootNode);
-                    unpins++;
+                boolean slotsChanged = ctx.interiorFrame.compact();
+                if (slotsChanged) {
+                    targetTupleIndex = ctx.interiorFrame.findInsertTupleIndex(tuple);
                 }
+                ctx.interiorFrame.insert(tuple, targetTupleIndex);
+                ctx.splitKey.reset();
+                break;
             }
-
-            unsetSmPages(ctx);
-
-            addFreePages(ctx);
-
-            repeatOp = false;
         }
     }
 
-    // TODO: to avoid latch deadlock, must modify cursor to detect empty leaves
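+    // Deletes a tuple from a leaf. If the leaf would become empty, it is unlinked from
+    // its siblings under the tree latch, marked as part of a structure modification,
+    // and registered as a free page.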
     private void deleteLeaf(ICachedPage node, int pageId, ITupleReference tuple, BTreeOpContext ctx) throws Exception {
         ctx.leafFrame.setPage(node);
+        int tupleIndex = ctx.leafFrame.findDeleteTupleIndex(tuple);
 
-        // will this leaf become empty?
-        if (ctx.leafFrame.getTupleCount() == 1) {
-            IBTreeLeafFrame siblingFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
-
-            ICachedPage leftNode = null;
-            ICachedPage rightNode = null;
-            int nextLeaf = ctx.leafFrame.getNextLeaf();
-            int prevLeaf = ctx.leafFrame.getPrevLeaf();
-
-            if (prevLeaf > 0)
-                leftNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, prevLeaf), false);
-
+        // Will this leaf become empty?
+        if (ctx.leafFrame.getTupleCount() > 1) {
+            // Leaf will not become empty.
+            ctx.leafFrame.delete(tuple, tupleIndex);
+            node.releaseWriteLatch();
+            bufferCache.unpin(node);
+            return;
+        }
+        
+        // Leaf will become empty. 
+        IBTreeLeafFrame siblingFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
+        siblingFrame.setMultiComparator(cmp);
+        ICachedPage leftNode = null;
+        ICachedPage rightNode = null;
+        int nextLeaf = ctx.leafFrame.getNextLeaf();
+        int prevLeaf = ctx.leafFrame.getPrevLeaf();
+        if (prevLeaf > 0) {
+            leftNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, prevLeaf), false);
+        }
+        try {
+            if (nextLeaf > 0) {
+                rightNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, nextLeaf), false);
+            }
             try {
-
-                if (nextLeaf > 0)
-                    rightNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, nextLeaf), false);
-
+                treeLatch.writeLock().lock();
                 try {
-                    treeLatch.writeLock().lock();
-                    treeLatchesAcquired++;
-
-                    try {
-                        ctx.leafFrame.delete(tuple, cmp, true);
-                        // to propagate the deletion we only need to make the
-                        // splitKey != null
-                        // we can reuse data to identify which key to delete in
-                        // the parent
-                        ctx.splitKey.initData(1);
-                    } catch (Exception e) {
-                        // don't propagate deletion upwards if deletion at this
-                        // level fails
-                        ctx.splitKey.reset();
-                        throw e;
-                    }
-
-                    // TODO: tie together with loggins
-                    ctx.leafFrame.setPageLsn(ctx.leafFrame.getPageLsn() + 1);
-                    ctx.leafFrame.setLevel(freePageManager.getFreePageLevelIndicator());
-
-                    ctx.smPages.add(pageId);
-                    ctx.leafFrame.setSmFlag(true);
-
-                    node.releaseWriteLatch();
-                    writeLatchesReleased++;
-                    bufferCache.unpin(node);
-                    unpins++;
-
-                    if (leftNode != null) {
-                        leftNode.acquireWriteLatch();
-                        try {
-                            siblingFrame.setPage(leftNode);
-                            siblingFrame.setNextLeaf(nextLeaf);
-                            siblingFrame.setPageLsn(siblingFrame.getPageLsn() + 1); // TODO:
-                            // tie
-                            // together
-                            // with
-                            // logging
-                        } finally {
-                            leftNode.releaseWriteLatch();
-                        }
-                    }
-
-                    if (rightNode != null) {
-                        rightNode.acquireWriteLatch();
-                        try {
-                            siblingFrame.setPage(rightNode);
-                            siblingFrame.setPrevLeaf(prevLeaf);
-                            siblingFrame.setPageLsn(siblingFrame.getPageLsn() + 1); // TODO:
-                            // tie
-                            // together
-                            // with
-                            // logging
-                        } finally {
-                            rightNode.releaseWriteLatch();
-                        }
-                    }
-
-                    // register pageId as a free
-                    ctx.freePages.add(pageId);
-
+                    ctx.leafFrame.delete(tuple, tupleIndex);
+                    // To propagate the deletion we only need to make splitKey != null;
+                    // we can reuse its data to identify which key to delete in the parent.
+                    ctx.splitKey.initData(1);
                 } catch (Exception e) {
-                    treeLatch.writeLock().unlock();
-                    treeLatchesReleased++;
+                    // Don't propagate deletion.
+                    ctx.splitKey.reset();
                     throw e;
-                } finally {
-                    if (rightNode != null) {
-                        bufferCache.unpin(rightNode);
+                }
+
+                // TODO: Tie together with logging.
+                ctx.leafFrame.setPageLsn(ctx.leafFrame.getPageLsn() + 1);
+                ctx.leafFrame.setLevel(freePageManager.getFreePageLevelIndicator());
+
+                ctx.smPages.add(pageId);
+                ctx.leafFrame.setSmFlag(true);
+
+                node.releaseWriteLatch();
+                bufferCache.unpin(node);
+
+                if (leftNode != null) {
+                    leftNode.acquireWriteLatch();
+                    try {
+                        siblingFrame.setPage(leftNode);
+                        siblingFrame.setNextLeaf(nextLeaf);
+                        // TODO: Tie together with logging.
+                        siblingFrame.setPageLsn(siblingFrame.getPageLsn() + 1);
+                    } finally {
+                        leftNode.releaseWriteLatch();
                     }
                 }
+
+                if (rightNode != null) {
+                    rightNode.acquireWriteLatch();
+                    try {
+                        siblingFrame.setPage(rightNode);
+                        siblingFrame.setPrevLeaf(prevLeaf);
+                        // TODO: Tie together with logging.
+                        siblingFrame.setPageLsn(siblingFrame.getPageLsn() + 1);
+                    } finally {
+                        rightNode.releaseWriteLatch();
+                    }
+                }
+                // Register pageId as a free page.
+                ctx.freePages.add(pageId);
+            } catch (Exception e) {
+                treeLatch.writeLock().unlock();
+                throw e;
             } finally {
-                if (leftNode != null) {
-                    bufferCache.unpin(leftNode);
+                if (rightNode != null) {
+                    bufferCache.unpin(rightNode);
                 }
             }
-        } else { // leaf will not become empty
-            ctx.leafFrame.delete(tuple, cmp, true);
-            node.releaseWriteLatch();
-            writeLatchesReleased++;
-            bufferCache.unpin(node);
-            unpins++;
+        } finally {
+            if (leftNode != null) {
+                bufferCache.unpin(leftNode);
+            }
         }
     }
 
@@ -789,6 +604,8 @@
             throws Exception {
         ctx.interiorFrame.setPage(node);
 
+        int tupleIndex = ctx.interiorFrame.findDeleteTupleIndex(tuple);
+        
         // this means there is only a child pointer but no key, this case
         // propagates the split
         if (ctx.interiorFrame.getTupleCount() == 0) {
@@ -806,59 +623,53 @@
             ctx.freePages.add(pageId);
 
         } else {
-            ctx.interiorFrame.delete(tuple, cmp, false);
-            ctx.interiorFrame.setPageLsn(ctx.interiorFrame.getPageLsn() + 1); // TODO:
-            // tie
-            // together
-            // with
-            // logging
-            ctx.splitKey.reset(); // don't propagate deletion
+            ctx.interiorFrame.delete(tuple, tupleIndex);
+            // TODO: Tie together with logging.
+            ctx.interiorFrame.setPageLsn(ctx.interiorFrame.getPageLsn() + 1);
+            // Don't propagate deletion.
+            ctx.splitKey.reset();
         }
     }
 
     private final void acquireLatch(ICachedPage node, IndexOp op, boolean isLeaf) {
-        if (isLeaf && (op.equals(IndexOp.INSERT) || op.equals(IndexOp.DELETE))) {
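+        // Modification ops (insert, delete, update) write-latch leaf pages; everything
+        // else, including all interior pages, is only read-latched here.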
+        if (isLeaf && (op == IndexOp.INSERT || op == IndexOp.DELETE || op == IndexOp.UPDATE)) {
             node.acquireWriteLatch();
-            writeLatchesAcquired++;
         } else {
             node.acquireReadLatch();
-            readLatchesAcquired++;
         }
     }
 
     private final void releaseLatch(ICachedPage node, IndexOp op, boolean isLeaf) {
-        if (isLeaf && (op.equals(IndexOp.INSERT) || op.equals(IndexOp.DELETE))) {
+        if (isLeaf && (op == IndexOp.INSERT || op == IndexOp.DELETE || op == IndexOp.UPDATE)) {
             node.releaseWriteLatch();
-            writeLatchesReleased++;
         } else {
             node.releaseReadLatch();
-            readLatchesReleased++;
         }
     }
 
     private boolean isConsistent(int pageId, BTreeOpContext ctx) throws Exception {
         ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-        pins++;
         node.acquireReadLatch();
-        readLatchesAcquired++;
         ctx.interiorFrame.setPage(node);
         boolean isConsistent = false;
         try {
             isConsistent = ctx.pageLsns.getLast() == ctx.interiorFrame.getPageLsn();
         } finally {
             node.releaseReadLatch();
-            readLatchesReleased++;
             bufferCache.unpin(node);
-            unpins++;
         }
         return isConsistent;
     }
 
-    private void performOp(int pageId, ICachedPage parent, BTreeOpContext ctx) throws Exception {
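+    // Recursively descends the tree from 'pageId' using latch coupling. The operation in
+    // ctx is carried out at the leaf level; split keys (or deletions of emptied pages)
+    // are then propagated back up through the interior levels via ctx.splitKey.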
+    private void performOp(int pageId, ICachedPage parent, BTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
         ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-        pins++;
-
         ctx.interiorFrame.setPage(node);
         // this check performs an unprotected read in the page
         // the following could happen: TODO fill out
         boolean unsafeIsLeaf = ctx.interiorFrame.isLeaf();
@@ -870,103 +681,72 @@
         // remember trail of pageLsns, to unwind recursion in case of an ongoing
         // structure modification
         ctx.pageLsns.add(ctx.interiorFrame.getPageLsn());
-
         try {
 
             // latch coupling, note: parent should never be write latched,
             // otherwise something is wrong.
             if (parent != null) {
                 parent.releaseReadLatch();
-                readLatchesReleased++;
                 bufferCache.unpin(parent);
-                unpins++;
             }
-
             if (!isLeaf || smFlag) {
                 if (!smFlag) {
-                    // we use this loop to deal with possibly multiple operation
+                    // We use this loop to deal with possibly multiple operation
                     // restarts due to ongoing structure modifications during
-                    // the descent
+                    // the descent.
                     boolean repeatOp = true;
                     while (repeatOp && ctx.opRestarts < MAX_RESTARTS) {
-                        int childPageId = ctx.interiorFrame.getChildPageId(ctx.pred, cmp);
+                        int childPageId = ctx.interiorFrame.getChildPageId(ctx.pred);
                         performOp(childPageId, node, ctx);
 
                         if (!ctx.pageLsns.isEmpty() && ctx.pageLsns.getLast() == RESTART_OP) {
-                            ctx.pageLsns.removeLast(); // pop the restart op
-                            // indicator
+                            // Pop the restart op indicator.
+                            ctx.pageLsns.removeLast();
                             if (isConsistent(pageId, ctx)) {
-                                node = null; // to avoid unpinning and
-                                // unlatching node again in
-                                // recursive call
-                                continue; // descend the tree again
+                                // Don't unpin and unlatch node again in recursive call.
+                                node = null; 
+                                // Descend the tree again.
+                                continue;
                             } else {
-                                ctx.pageLsns.removeLast(); // pop pageLsn of
-                                // this page
-                                // (version seen by this op
-                                // during descent)
-                                ctx.pageLsns.add(RESTART_OP); // this node is
-                                // not
-                                // consistent,
-                                // set the
-                                // restart
-                                // indicator for
-                                // upper level
+                                // Pop pageLsn of this page (version seen by this op during descent).
+                                ctx.pageLsns.removeLast(); 
+                                // This node is not consistent, so set the restart indicator for the upper level.
+                                ctx.pageLsns.add(RESTART_OP);
                                 break;
                             }
                         }
-
+                        
                         switch (ctx.op) {
-
-                            case INSERT: {
-                                if (ctx.splitKey.getBuffer() != null) {
-                                    node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-                                    pins++;
-                                    node.acquireWriteLatch();
-                                    writeLatchesAcquired++;
-                                    try {
-                                        insertInterior(node, pageId, ctx.splitKey.getTuple(), ctx);
-                                    } finally {
-                                        node.releaseWriteLatch();
-                                        writeLatchesReleased++;
-                                        bufferCache.unpin(node);
-                                        unpins++;
-                                    }
-                                } else {
-                                    unsetSmPages(ctx);
-                                }
-                            }
-                                break;
-
+                            case INSERT:
+                            case UPDATE:
                             case DELETE: {
+                                // Is there a propagated split key?
                                 if (ctx.splitKey.getBuffer() != null) {
                                     node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-                                    pins++;
                                     node.acquireWriteLatch();
-                                    writeLatchesAcquired++;
                                     try {
-                                        deleteInterior(node, pageId, ctx.pred.getLowKey(), ctx);
+                                        if (ctx.op == IndexOp.DELETE) {
+                                            deleteInterior(node, pageId, ctx.pred.getLowKey(), ctx);
+                                        } else {
+                                            // Insert or update op; both can cause split keys to propagate upwards.
+                                            insertInterior(node, pageId, ctx.splitKey.getTuple(), ctx);
+                                        }
                                     } finally {
                                         node.releaseWriteLatch();
-                                        writeLatchesReleased++;
                                         bufferCache.unpin(node);
-                                        unpins++;
                                     }
                                 } else {
                                     unsetSmPages(ctx);
                                 }
-                            }
                                 break;
-
-                            case SEARCH: {
-                                // do nothing
                             }
+                            default: {
+                                // Do nothing for Search and DiskOrderScan.
                                 break;
-
+                            }
                         }
-
-                        repeatOp = false; // operation completed
-
+                        // Operation completed.
+                        repeatOp = false;
                     } // end while
                 } else { // smFlag
                     ctx.opRestarts++;
@@ -974,7 +754,6 @@
                             + ", RESTARTS: " + ctx.opRestarts);
                     releaseLatch(node, ctx.op, unsafeIsLeaf);
                     bufferCache.unpin(node);
-                    unpins++;
 
                     // TODO: this should be an instant duration lock, how to do
                     // this in java?
@@ -995,41 +774,39 @@
                 switch (ctx.op) {
                     case INSERT: {
                         insertLeaf(node, pageId, ctx.pred.getLowKey(), ctx);
-                    }
                         break;
-
+                    }
+                    case UPDATE: {
+                        updateLeaf(node, pageId, ctx.pred.getLowKey(), ctx);
+                        break;
+                    }
                     case DELETE: {
                         deleteLeaf(node, pageId, ctx.pred.getLowKey(), ctx);
-                    }
                         break;
-
+                    }
                     case SEARCH: {
                         ctx.cursorInitialState.setPage(node);
                         ctx.cursor.open(ctx.cursorInitialState, ctx.pred);
-                    }
                         break;
+                    }
                 }
             }
         } catch (TreeIndexException e) {
-            // System.out.println("BTREE EXCEPTION");
-            // System.out.println(e.getMessage());
-            // e.printStackTrace();
+            //e.printStackTrace();
             if (!e.getHandled()) {
                 releaseLatch(node, ctx.op, unsafeIsLeaf);
                 bufferCache.unpin(node);
-                unpins++;
                 e.setHandled(true);
             }
             throw e;
-        } catch (Exception e) { // this could be caused, e.g. by a
-            // failure to pin a new node during a split
-            System.out.println("ASTERIX EXCEPTION");
-            e.printStackTrace();
+        } catch (Exception e) {
+            //e.printStackTrace();
+            // This could be caused, e.g. by a failure to pin a new node during a split.
             releaseLatch(node, ctx.op, unsafeIsLeaf);
             bufferCache.unpin(node);
-            unpins++;
             BTreeException propException = new BTreeException(e);
-            propException.setHandled(true); // propagate a BTreeException,
+            propException.setHandled(true);
+            // propagate a BTreeException,
             // indicating that the parent node
             // must not be unlatched and
             // unpinned
@@ -1037,8 +814,6 @@
         }
     }
 
-    private boolean bulkNewPage = false;
-
     public final class BulkLoadContext implements IIndexBulkLoadContext {
         public final int slotSize;
         public final int leafMaxBytes;
@@ -1053,15 +828,18 @@
         private final ITreeIndexTupleWriter tupleWriter;
 
         public BulkLoadContext(float fillFactor, IBTreeLeafFrame leafFrame, IBTreeInteriorFrame interiorFrame,
-                ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
+                ITreeIndexMetaDataFrame metaFrame, MultiComparator cmp) throws HyracksDataException {
 
+            leafFrame.setMultiComparator(cmp);
+            interiorFrame.setMultiComparator(cmp);
+
             splitKey = new BTreeSplitKey(leafFrame.getTupleWriter().createTupleReference());
             tupleWriter = leafFrame.getTupleWriter();
 
             NodeFrontier leafFrontier = new NodeFrontier(leafFrame.createTupleReference());
             leafFrontier.pageId = freePageManager.getFreePage(metaFrame);
             leafFrontier.page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, leafFrontier.pageId),
-                    bulkNewPage);
+                    true);
             leafFrontier.page.acquireWriteLatch();
 
             interiorFrame.setPage(leafFrontier.page);
@@ -1084,7 +862,7 @@
         private void addLevel() throws HyracksDataException {
             NodeFrontier frontier = new NodeFrontier(tupleWriter.createTupleReference());
             frontier.pageId = freePageManager.getFreePage(metaFrame);
-            frontier.page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, frontier.pageId), bulkNewPage);
+            frontier.page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, frontier.pageId), true);
             frontier.page.acquireWriteLatch();
             frontier.lastTuple.setFieldCount(cmp.getKeyFieldCount());
             interiorFrame.setPage(frontier.page);
@@ -1112,8 +890,7 @@
             BTreeSplitKey copyKey = ctx.splitKey.duplicate(ctx.leafFrame.getTupleWriter().createTupleReference());
             tuple = copyKey.getTuple();
 
-            frontier.lastTuple.resetByTupleOffset(frontier.page.getBuffer(),
-                    ctx.interiorFrame.getTupleOffset(ctx.interiorFrame.getTupleCount() - 1));
+            frontier.lastTuple.resetByTupleIndex(ctx.interiorFrame, ctx.interiorFrame.getTupleCount() - 1);
             int splitKeySize = ctx.tupleWriter.bytesRequired(frontier.lastTuple, 0, cmp.getKeyFieldCount());
             ctx.splitKey.initData(splitKeySize);
             ctx.tupleWriter
@@ -1121,7 +898,7 @@
             ctx.splitKey.getTuple().resetByTupleOffset(ctx.splitKey.getBuffer(), 0);
             ctx.splitKey.setLeftPage(frontier.pageId);
 
-            ctx.interiorFrame.deleteGreatest(cmp);
+            ctx.interiorFrame.deleteGreatest();
 
             frontier.page.releaseWriteLatch();
             bufferCache.unpin(frontier.page);
@@ -1130,38 +907,31 @@
             ctx.splitKey.setRightPage(frontier.pageId);
             propagateBulk(ctx, level + 1);
 
-            frontier.page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, frontier.pageId), bulkNewPage);
+            frontier.page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, frontier.pageId), true);
             frontier.page.acquireWriteLatch();
             ctx.interiorFrame.setPage(frontier.page);
             ctx.interiorFrame.initBuffer((byte) level);
         }
-        ctx.interiorFrame.insertSorted(tuple, cmp);
-
-        // debug print
-        // ISerializerDeserializer[] btreeSerde = {
-        // UTF8StringSerializerDeserializer.INSTANCE,
-        // IntegerSerializerDeserializer.INSTANCE };
-        // String s = ctx.interiorFrame.printKeys(cmp, btreeSerde);
-        // System.out.println(s);
+        ctx.interiorFrame.insertSorted(tuple);
     }
 
     // assumes btree has been created and opened
     @Override
-    public IIndexBulkLoadContext beginBulkLoad(float fillFactor, ITreeIndexFrame leafFrame,
-            ITreeIndexFrame interiorFrame, ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
-
-        if (loaded)
-            throw new HyracksDataException("Trying to bulk-load BTree but BTree has already been loaded.");
-
-        BulkLoadContext ctx = new BulkLoadContext(fillFactor, (IBTreeLeafFrame) leafFrame,
-                (IBTreeInteriorFrame) interiorFrame, metaFrame);
-        ctx.nodeFrontiers.get(0).lastTuple.setFieldCount(cmp.getFieldCount());
+    public IIndexBulkLoadContext beginBulkLoad(float fillFactor) throws TreeIndexException, HyracksDataException {
+        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame)leafFrameFactory.createFrame();
+        if (!isEmptyTree(leafFrame)) {
+            throw new BTreeException("Trying to bulk-load a non-empty BTree.");
+        }
+
+        BulkLoadContext ctx = new BulkLoadContext(fillFactor, leafFrame,
+                (IBTreeInteriorFrame)interiorFrameFactory.createFrame(), freePageManager.getMetaDataFrameFactory().createFrame(), cmp);
+        ctx.nodeFrontiers.get(0).lastTuple.setFieldCount(fieldCount);
         ctx.splitKey.getTuple().setFieldCount(cmp.getKeyFieldCount());
         return ctx;
     }
 
     @Override
-    public void bulkLoadAddTuple(IIndexBulkLoadContext ictx, ITupleReference tuple) throws HyracksDataException {
+    public void bulkLoadAddTuple(ITupleReference tuple, IIndexBulkLoadContext ictx) throws HyracksDataException {
         BulkLoadContext ctx = (BulkLoadContext) ictx;
         NodeFrontier leafFrontier = ctx.nodeFrontiers.get(0);
         IBTreeLeafFrame leafFrame = ctx.leafFrame;
@@ -1171,7 +941,7 @@
 
         // try to free space by compression
         if (spaceUsed + spaceNeeded > ctx.leafMaxBytes) {
-            leafFrame.compress(cmp);
+            leafFrame.compress();
             spaceUsed = leafFrame.getBuffer().capacity() - leafFrame.getTotalFreeSpace();
         }
 
@@ -1194,7 +964,7 @@
             propagateBulk(ctx, 1);
 
             leafFrontier.page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, leafFrontier.pageId),
-                    bulkNewPage);
+                    true);
             leafFrontier.page.acquireWriteLatch();
             leafFrame.setPage(leafFrontier.page);
             leafFrame.initBuffer((byte) 0);
@@ -1202,21 +972,14 @@
         }
 
         leafFrame.setPage(leafFrontier.page);
-        leafFrame.insertSorted(tuple, cmp);
-
-        // debug print
-        // ISerializerDeserializer[] btreeSerde = {
-        // UTF8StringSerializerDeserializer.INSTANCE,
-        // IntegerSerializerDeserializer.INSTANCE };
-        // String s = leafFrame.printKeys(cmp, btreeSerde);
-        // System.out.println(s);
+        leafFrame.insertSorted(tuple);
     }
 
     @Override
     public void endBulkLoad(IIndexBulkLoadContext ictx) throws HyracksDataException {
         // copy root
         BulkLoadContext ctx = (BulkLoadContext) ictx;
-        ICachedPage rootNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rootPage), bulkNewPage);
+        ICachedPage rootNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rootPage), true);
         rootNode.acquireWriteLatch();
         NodeFrontier lastNodeFrontier = ctx.nodeFrontiers.get(ctx.nodeFrontiers.size() - 1);
         IBTreeInteriorFrame interiorFrame = ctx.interiorFrame;
@@ -1241,18 +1004,15 @@
                 bufferCache.unpin(ctx.nodeFrontiers.get(i).page);
             }
         }
-        // debug
-        currentLevel = (byte) ctx.nodeFrontiers.size();
-
-        loaded = true;
     }
 
     @Override
-    public BTreeOpContext createOpContext(IndexOp op, ITreeIndexFrame leafFrame, ITreeIndexFrame interiorFrame,
-            ITreeIndexMetaDataFrame metaFrame) {
-        return new BTreeOpContext(op, (IBTreeLeafFrame) leafFrame, (IBTreeInteriorFrame) interiorFrame, metaFrame, 6);
+    public BTreeOpContext createOpContext(IndexOp op) {
+        return new BTreeOpContext(op, (IBTreeLeafFrame) leafFrameFactory.createFrame(),
+                (IBTreeInteriorFrame) interiorFrameFactory.createFrame(), freePageManager.getMetaDataFrameFactory()
+                        .createFrame(), cmp);
     }
-
+    
     public ITreeIndexFrameFactory getInteriorFrameFactory() {
         return interiorFrameFactory;
     }
@@ -1271,20 +1031,95 @@
 
     public int getRootPageId() {
         return rootPage;
-    }
-
-    @Override
-    public void update(ITupleReference tuple, IndexOpContext ictx) throws Exception {
-        throw new Exception("BTree Update not implemented.");
-    }
+    }    
 
     @Override
     public int getFieldCount() {
-        return cmp.getFieldCount();
+        return fieldCount;
     }
 
     @Override
     public IndexType getIndexType() {
         return IndexType.BTREE;
     }
-}
\ No newline at end of file
+    
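+    // Returns the level of the root page, which equals the height of the tree (leaves
+    // are at level 0).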
+    public byte getTreeHeight(IBTreeLeafFrame leafFrame) throws HyracksDataException {
+        ICachedPage rootNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rootPage), false);
+        rootNode.acquireReadLatch();
+        try {
+            leafFrame.setPage(rootNode);
+            return leafFrame.getLevel();
+        } finally {
+            rootNode.releaseReadLatch();
+            bufferCache.unpin(rootNode);
+        }
+    }
+    
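+    // The tree is empty iff the root is still a leaf that contains no tuples.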
+    public boolean isEmptyTree(IBTreeLeafFrame leafFrame) throws HyracksDataException {
+        ICachedPage rootNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rootPage), false);
+        rootNode.acquireReadLatch();
+        try {
+            leafFrame.setPage(rootNode);
+            return (leafFrame.getLevel() == 0 && leafFrame.getTupleCount() == 0);
+        } finally {
+            rootNode.releaseReadLatch();
+            bufferCache.unpin(rootNode);
+        }
+    }
+    
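+    // Debugging helper: renders the tree as an indented string, one line per page,
+    // printing the keys of each page with the given serdes.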
+    @SuppressWarnings("rawtypes") 
+    public String printTree(IBTreeLeafFrame leafFrame, IBTreeInteriorFrame interiorFrame, ISerializerDeserializer[] keySerdes)
+            throws Exception {
+        byte treeHeight = getTreeHeight(leafFrame);
+        StringBuilder strBuilder = new StringBuilder();
+        printTree(rootPage, null, false, leafFrame, interiorFrame, treeHeight, keySerdes, strBuilder);
+        return strBuilder.toString();
+    }
+
+    @SuppressWarnings("rawtypes") 
+    public void printTree(int pageId, ICachedPage parent, boolean unpin, IBTreeLeafFrame leafFrame,
+            IBTreeInteriorFrame interiorFrame, byte treeHeight, ISerializerDeserializer[] keySerdes, StringBuilder strBuilder) throws Exception {
+        ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+        node.acquireReadLatch();
+        try {
+            if (parent != null && unpin) {
+                parent.releaseReadLatch();
+                bufferCache.unpin(parent);
+            }
+            interiorFrame.setPage(node);
+            int level = interiorFrame.getLevel();
+            strBuilder.append(String.format("%1d ", level));
+            strBuilder.append(String.format("%3d ", pageId) + ": ");
+            for (int i = 0; i < treeHeight - level; i++) {
+                strBuilder.append("    ");
+            }
+
+            String keyString;
+            if (interiorFrame.isLeaf()) {
+                leafFrame.setPage(node);
+                keyString = TreeIndexUtils.printFrameTuples(leafFrame, keySerdes);
+            } else {
+                keyString = TreeIndexUtils.printFrameTuples(interiorFrame, keySerdes);
+            }
+
+            strBuilder.append(keyString + "\n");
+            if (!interiorFrame.isLeaf()) {
+                ArrayList<Integer> children = ((BTreeNSMInteriorFrame) (interiorFrame)).getChildren(cmp);
+                for (int i = 0; i < children.size(); i++) {
+                    printTree(children.get(i), node, i == children.size() - 1, leafFrame, interiorFrame, treeHeight, keySerdes, strBuilder);
+                }
+            } else {
+                node.releaseReadLatch();
+                bufferCache.unpin(node);
+            }
+        } catch (Exception e) {
+            node.releaseReadLatch();
+            bufferCache.unpin(node);
+            e.printStackTrace();
+        }
+    }
+}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOpContext.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOpContext.java
index 6d65b14..82c747c 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOpContext.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOpContext.java
@@ -17,45 +17,42 @@
 
 import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
 import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexOpContext;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOpContext;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.IntArrayList;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.LongArrayList;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
 
-public final class BTreeOpContext implements IndexOpContext {
-    public final IndexOp op;
+public final class BTreeOpContext implements IIndexOpContext {
+    private static final int INIT_ARRAYLIST_SIZE = 6;
     public final IBTreeLeafFrame leafFrame;
     public final IBTreeInteriorFrame interiorFrame;
     public final ITreeIndexMetaDataFrame metaFrame;
+    public IndexOp op;
     public ITreeIndexCursor cursor;
     public BTreeCursorInitialState cursorInitialState;
     public RangePredicate pred;
-    public final BTreeSplitKey splitKey;
+    public BTreeSplitKey splitKey;
     public int opRestarts = 0;
-    public final IntArrayList pageLsns; // used like a stack
-    public final IntArrayList smPages;
-    public final IntArrayList freePages;
+    public LongArrayList pageLsns;
+    public IntArrayList smPages;
+    public IntArrayList freePages;
 
     public BTreeOpContext(IndexOp op, IBTreeLeafFrame leafFrame, IBTreeInteriorFrame interiorFrame,
-            ITreeIndexMetaDataFrame metaFrame, int treeHeightHint) {
-        this.op = op;
+            ITreeIndexMetaDataFrame metaFrame, MultiComparator cmp) {        
+        if (leafFrame != null) {
+            leafFrame.setMultiComparator(cmp);
+        }
         this.leafFrame = leafFrame;
+        if (interiorFrame != null) {
+            interiorFrame.setMultiComparator(cmp);
+        }
         this.interiorFrame = interiorFrame;
         this.metaFrame = metaFrame;
-
-        pageLsns = new IntArrayList(treeHeightHint, treeHeightHint);
-        if (op != IndexOp.SEARCH && op != IndexOp.DISKORDERSCAN) {
-            smPages = new IntArrayList(treeHeightHint, treeHeightHint);
-            freePages = new IntArrayList(treeHeightHint, treeHeightHint);
-            pred = new RangePredicate(true, null, null, true, true, null, null);
-            splitKey = new BTreeSplitKey(leafFrame.getTupleWriter().createTupleReference());
-        } else {
-            smPages = null;
-            freePages = null;
-            splitKey = null;
-            cursorInitialState = new BTreeCursorInitialState(null);
-        }
+        this.pageLsns = new LongArrayList(INIT_ARRAYLIST_SIZE, INIT_ARRAYLIST_SIZE);
+        reset(op);
     }
 
     public void reset() {
@@ -67,4 +64,28 @@
             smPages.clear();
         opRestarts = 0;
     }
+
+    @Override
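+    // Lazily allocates the operation-specific state: search/scan ops only need the cursor
+    // initial state, while insert/update/delete ops need smPages, freePages, a range
+    // predicate, and a split key. Once allocated, the objects are kept so that the same
+    // context can be reused across operations.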
+    public void reset(IndexOp newOp) {
+        if (newOp == IndexOp.SEARCH || newOp == IndexOp.DISKORDERSCAN) {
+            if (cursorInitialState == null) {
+                cursorInitialState = new BTreeCursorInitialState(null);
+            }
+        } else {
+            // Insert, update or delete operation.
+            if (smPages == null) {
+                smPages = new IntArrayList(INIT_ARRAYLIST_SIZE, INIT_ARRAYLIST_SIZE);
+            }
+            if (freePages == null) {
+                freePages = new IntArrayList(INIT_ARRAYLIST_SIZE, INIT_ARRAYLIST_SIZE);
+            }
+            if (pred == null) {
+                pred = new RangePredicate(true, null, null, true, true, null, null);
+            }
+            if (splitKey == null) {
+                splitKey = new BTreeSplitKey(leafFrame.getTupleWriter().createTupleReference());
+            }
+        }
+        this.op = newOp;
+    }
 }
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeRangeSearchCursor.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeRangeSearchCursor.java
index 4c7503d..1072bfb 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeRangeSearchCursor.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeRangeSearchCursor.java
@@ -56,7 +56,7 @@
 
     public BTreeRangeSearchCursor(IBTreeLeafFrame frame) {
         this.frame = frame;
-        this.frameTuple = frame.createTupleReference();
+        this.frameTuple = frame.createTupleReference();        
     }
 
     @Override
@@ -141,7 +141,7 @@
         tupleIndex += tupleIndexInc;
     }
 
-    private int getLowKeyIndex() {
+    private int getLowKeyIndex() throws HyracksDataException {
         int index;
         if (lowKey == null)
             index = 0;
@@ -157,7 +157,7 @@
         return index;
     }
 
-    private int getHighKeyIndex() {
+    private int getHighKeyIndex() throws HyracksDataException {
         int index;
         if (highKey == null)
             index = frame.getTupleCount() - 1;
@@ -191,23 +191,19 @@
         lowKey = pred.getLowKey();
         highKey = pred.getHighKey();
 
-        // field count must be identical for lowKeyCmp and highKeyCmp (key count
-        // may be different)
-        frameTuple.setFieldCount(lowKeyCmp.getFieldCount());
-
         // init
-        lowKeyFtm = FindTupleMode.FTM_EXCLUSIVE;
+        lowKeyFtm = FindTupleMode.EXCLUSIVE;
         if (pred.lowKeyInclusive) {
-            lowKeyFtp = FindTupleNoExactMatchPolicy.FTP_LOWER_KEY;
+            lowKeyFtp = FindTupleNoExactMatchPolicy.LOWER_KEY;
         } else {
-            lowKeyFtp = FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY;
+            lowKeyFtp = FindTupleNoExactMatchPolicy.HIGHER_KEY;
         }
 
-        highKeyFtm = FindTupleMode.FTM_EXCLUSIVE;
+        highKeyFtm = FindTupleMode.EXCLUSIVE;
         if (pred.highKeyInclusive) {
-            highKeyFtp = FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY;
+            highKeyFtp = FindTupleNoExactMatchPolicy.HIGHER_KEY;
         } else {
-            highKeyFtp = FindTupleNoExactMatchPolicy.FTP_LOWER_KEY;
+            highKeyFtp = FindTupleNoExactMatchPolicy.LOWER_KEY;
         }
 
         if (pred.isForward()) {
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixSlotManager.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixSlotManager.java
index 6f98c8c..6525bb9 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixSlotManager.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixSlotManager.java
@@ -20,6 +20,7 @@
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
 import edu.uci.ics.hyracks.storage.am.btree.api.IPrefixSlotManager;
 import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
@@ -30,11 +31,13 @@
     private static final int slotSize = 4;
     public static final int TUPLE_UNCOMPRESSED = 0xFF;
     public static final int MAX_PREFIX_SLOTS = 0xFE;
-    public static final int GREATEST_SLOT = 0x00FFFFFF;
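+    // GREATEST_KEY_INDICATOR: the search key is greater than all keys on the page
+    // (i.e. the append position). ERROR_INDICATOR: the lookup failed for the requested
+    // mode, e.g. an exact match exists for EXCLUSIVE_ERROR_IF_EXISTS, or no exact match
+    // exists for EXACT.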
+    public static final int GREATEST_KEY_INDICATOR = 0x00FFFFFF;
+    public static final int ERROR_INDICATOR = 0x00FFFFFE;
 
     private ByteBuffer buf;
     private BTreeFieldPrefixNSMLeafFrame frame;
-
+    private MultiComparator cmp;
+    
     public int decodeFirstSlotField(int slot) {
         return (slot & 0xFF000000) >>> 24;
     }
@@ -48,7 +51,7 @@
     }
 
     // returns prefix slot number, or TUPLE_UNCOMPRESSED of no match was found
-    public int findPrefix(ITupleReference tuple, ITreeIndexTupleReference framePrefixTuple, MultiComparator multiCmp) {
+    public int findPrefix(ITupleReference tuple, ITreeIndexTupleReference framePrefixTuple) {
         int prefixMid;
         int prefixBegin = 0;
         int prefixEnd = frame.getPrefixTupleCount() - 1;
@@ -57,10 +60,10 @@
             while (prefixBegin <= prefixEnd) {
                 prefixMid = (prefixBegin + prefixEnd) / 2;
                 framePrefixTuple.resetByTupleIndex(frame, prefixMid);
-                int cmp = multiCmp.fieldRangeCompare(tuple, framePrefixTuple, 0, framePrefixTuple.getFieldCount());
-                if (cmp < 0)
+                int cmpVal = cmp.fieldRangeCompare(tuple, framePrefixTuple, 0, framePrefixTuple.getFieldCount());
+                if (cmpVal < 0)
                     prefixEnd = prefixMid - 1;
-                else if (cmp > 0)
+                else if (cmpVal > 0)
                     prefixBegin = prefixMid + 1;
                 else
                     return prefixMid;
@@ -75,9 +78,7 @@
             ITreeIndexTupleReference framePrefixTuple, MultiComparator multiCmp, FindTupleMode mode,
             FindTupleNoExactMatchPolicy matchPolicy) {
         if (frame.getTupleCount() <= 0)
-            encodeSlotFields(TUPLE_UNCOMPRESSED, GREATEST_SLOT);
-
-        frameTuple.setFieldCount(multiCmp.getFieldCount());
+            encodeSlotFields(TUPLE_UNCOMPRESSED, GREATEST_KEY_INDICATOR);
 
         int prefixMid;
         int prefixBegin = 0;
@@ -101,8 +102,8 @@
                 prefixBegin = prefixMid + 1;
                 tuplePrefixSlotNumUbound = prefixMid + 1;
             } else {
-                if (mode == FindTupleMode.FTM_EXCLUSIVE) {
-                    if (matchPolicy == FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY)
+                if (mode == FindTupleMode.EXCLUSIVE) {
+                    if (matchPolicy == FindTupleNoExactMatchPolicy.HIGHER_KEY)
                         prefixBegin = prefixMid + 1;
                     else
                         prefixEnd = prefixMid - 1;
@@ -153,13 +154,17 @@
             else if (cmp > 0)
                 tupleBegin = tupleMid + 1;
             else {
-                if (mode == FindTupleMode.FTM_EXCLUSIVE) {
-                    if (matchPolicy == FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY)
+                if (mode == FindTupleMode.EXCLUSIVE) {
+                    if (matchPolicy == FindTupleNoExactMatchPolicy.HIGHER_KEY)
                         tupleBegin = tupleMid + 1;
                     else
                         tupleEnd = tupleMid - 1;
                 } else {
-                    return encodeSlotFields(prefixMatch, tupleMid);
+                	if (mode == FindTupleMode.EXCLUSIVE_ERROR_IF_EXISTS) {
+                		return encodeSlotFields(prefixMatch, ERROR_INDICATOR);
+                	} else {
+                		return encodeSlotFields(prefixMatch, tupleMid);
+                	}
                 }
             }
         }
@@ -167,27 +172,27 @@
         // System.out.println("RECS: " + recBegin + " " + recMid + " " +
         // recEnd);
 
-        if (mode == FindTupleMode.FTM_EXACT)
-            return encodeSlotFields(prefixMatch, GREATEST_SLOT);
+        if (mode == FindTupleMode.EXACT)
+            return encodeSlotFields(prefixMatch, ERROR_INDICATOR);
 
         // do final comparison to determine whether the search key is greater
         // than all keys or in between some existing keys
-        if (matchPolicy == FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY) {
+        if (matchPolicy == FindTupleNoExactMatchPolicy.HIGHER_KEY) {
             if (tupleBegin > frame.getTupleCount() - 1)
-                return encodeSlotFields(prefixMatch, GREATEST_SLOT);
+                return encodeSlotFields(prefixMatch, GREATEST_KEY_INDICATOR);
             frameTuple.resetByTupleIndex(frame, tupleBegin);
             if (multiCmp.compare(searchKey, frameTuple) < 0)
                 return encodeSlotFields(prefixMatch, tupleBegin);
             else
-                return encodeSlotFields(prefixMatch, GREATEST_SLOT);
+                return encodeSlotFields(prefixMatch, GREATEST_KEY_INDICATOR);
         } else {
             if (tupleEnd < 0)
-                return encodeSlotFields(prefixMatch, GREATEST_SLOT);
+                return encodeSlotFields(prefixMatch, GREATEST_KEY_INDICATOR);
             frameTuple.resetByTupleIndex(frame, tupleEnd);
             if (multiCmp.compare(searchKey, frameTuple) > 0)
                 return encodeSlotFields(prefixMatch, tupleEnd);
             else
-                return encodeSlotFields(prefixMatch, GREATEST_SLOT);
+                return encodeSlotFields(prefixMatch, GREATEST_KEY_INDICATOR);
         }
     }
 
@@ -217,7 +222,10 @@
 
     public int insertSlot(int slot, int tupleOff) {
         int slotNum = decodeSecondSlotField(slot);
-        if (slotNum == GREATEST_SLOT) {
+        if (slotNum == ERROR_INDICATOR) {
+            System.out.println("Error: insertSlot() called with an ERROR_INDICATOR slot.");
+        }
+        if (slotNum == GREATEST_KEY_INDICATOR) {
             int slotOff = getTupleSlotEndOff() - slotSize;
             int newSlot = encodeSlotFields(decodeFirstSlotField(slot), tupleOff);
             setSlot(slotOff, newSlot);
@@ -235,11 +243,6 @@
         }
     }
 
-    public void setFrame(BTreeFieldPrefixNSMLeafFrame frame) {
-        this.frame = frame;
-        this.buf = frame.getBuffer();
-    }
-
     public int getPrefixSlotOff(int tupleIndex) {
         return getPrefixSlotStartOff() - tupleIndex * slotSize;
     }
@@ -251,4 +254,51 @@
     public void setPrefixSlot(int tupleIndex, int slot) {
         buf.putInt(getPrefixSlotOff(tupleIndex), slot);
     }
+
+	@Override
+	public int getGreatestKeyIndicator() {
+		return GREATEST_KEY_INDICATOR;
+	}
+
+	@Override
+	public int getErrorIndicator() {
+		return ERROR_INDICATOR;
+	}
+
+	@Override
+	public void setFrame(ITreeIndexFrame frame) {
+		this.frame = (BTreeFieldPrefixNSMLeafFrame)frame;
+        this.buf = frame.getBuffer();
+	}
+
+	@Override
+	public int findTupleIndex(ITupleReference searchKey,
+			ITreeIndexTupleReference frameTuple, MultiComparator multiCmp,
+			FindTupleMode mode, FindTupleNoExactMatchPolicy matchPolicy) {
+		throw new UnsupportedOperationException("Not implemented.");
+	}
+	
+	@Override
+	public int getSlotStartOff() {
+		throw new UnsupportedOperationException("Not implemented.");
+	}
+
+	@Override
+	public int getSlotEndOff() {
+		throw new UnsupportedOperationException("Not implemented.");
+	}
+
+	@Override
+	public int getTupleOff(int slotOff) {
+		throw new UnsupportedOperationException("Not implemented.");
+	}
+
+	@Override
+	public int getSlotOff(int tupleIndex) {
+		throw new UnsupportedOperationException("Not implemented.");
+	}
+	
+	public void setMultiComparator(MultiComparator cmp) {
+		this.cmp = cmp;
+	}
 }
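
The slot manager above packs two fields into each 32-bit slot: decodeFirstSlotField() shows that the upper byte carries the prefix-slot field, and the reserved lower-24-bit values GREATEST_KEY_INDICATOR and ERROR_INDICATOR explain the checks in insertSlot(). Below is a minimal sketch of the assumed encoding arithmetic; encodeSlotFields() and decodeSecondSlotField() do not appear in this hunk, so their bodies here are inferred from the masks above and are not authoritative.

// Sketch of the assumed 32-bit slot layout (inferred, not confirmed by this hunk):
// upper 8 bits = prefix slot field, lower 24 bits = tuple field.
public class SlotLayoutSketch {
    static final int TUPLE_UNCOMPRESSED = 0xFF;             // tuple has no matching prefix
    static final int GREATEST_KEY_INDICATOR = 0x00FFFFFF;   // key greater than all keys in the frame
    static final int ERROR_INDICATOR = 0x00FFFFFE;           // exact match under EXCLUSIVE_ERROR_IF_EXISTS

    static int encodeSlotFields(int prefixSlotNum, int tupleField) {
        return ((prefixSlotNum & 0xFF) << 24) | (tupleField & 0x00FFFFFF);
    }

    static int decodeFirstSlotField(int slot) {
        return (slot & 0xFF000000) >>> 24;
    }

    static int decodeSecondSlotField(int slot) {
        return slot & 0x00FFFFFF;
    }

    public static void main(String[] args) {
        int slot = encodeSlotFields(TUPLE_UNCOMPRESSED, 42);
        System.out.println(decodeFirstSlotField(slot));   // prints 255 (uncompressed)
        System.out.println(decodeSecondSlotField(slot));  // prints 42
    }
}
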
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixTupleReference.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixTupleReference.java
index 9a655fc..9644a3e 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixTupleReference.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixTupleReference.java
@@ -17,6 +17,7 @@
 
     public FieldPrefixTupleReference(ITreeIndexTupleReference helperTuple) {
         this.helperTuple = helperTuple;
+        this.fieldCount = helperTuple.getFieldCount();
     }
 
     @Override
@@ -46,7 +47,7 @@
 
     @Override
     public void setFieldCount(int fieldStartIndex, int fieldCount) {
-        // not implemented
+        throw new UnsupportedOperationException("Not supported.");
     }
 
     @Override
@@ -88,6 +89,28 @@
     // unsupported operation
     @Override
     public void resetByTupleOffset(ByteBuffer buf, int tupleStartOffset) {
-        frame = null;
+        throw new UnsupportedOperationException("Resetting a field-prefix tuple reference by offset is not supported.");
+    }
+
+    @Override
+    public int getTupleSize() {
+        return getSuffixTupleSize() + getPrefixTupleSize();
+    }
+    
+    public int getSuffixTupleSize() {
+        helperTuple.setFieldCount(numPrefixFields, fieldCount - numPrefixFields);
+        helperTuple.resetByTupleOffset(frame.getBuffer(), suffixTupleStartOff);
+        return helperTuple.getTupleSize();
+    }
+    
+    public int getPrefixTupleSize() {
+        if (numPrefixFields == 0) return 0;
+        helperTuple.setFieldCount(numPrefixFields);
+        helperTuple.resetByTupleOffset(frame.getBuffer(), prefixTupleStartOff);
+        return helperTuple.getTupleSize();
+    }
+    
+    public int getNumPrefixFields() {
+        return numPrefixFields;
     }
 }
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeUtils.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeUtils.java
new file mode 100644
index 0000000..79945b6
--- /dev/null
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeUtils.java
@@ -0,0 +1,70 @@
+package edu.uci.ics.hyracks.storage.am.btree.util;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeException;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+
+public class BTreeUtils {
+    public static BTree createBTree(IBufferCache bufferCache, int btreeFileId, ITypeTrait[] typeTraits, IBinaryComparator[] cmps, BTreeLeafFrameType leafType) throws BTreeException {
+    	MultiComparator cmp = new MultiComparator(cmps);
+        TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+        ITreeIndexFrameFactory leafFrameFactory = getLeafFrameFactory(tupleWriterFactory, leafType);
+        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
+        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, btreeFileId, 0, metaFrameFactory);
+        BTree btree = new BTree(bufferCache, typeTraits.length, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
+        return btree;
+    }
+    
+    public static MultiComparator getSearchMultiComparator(MultiComparator btreeCmp, ITupleReference searchKey) {
+        if (searchKey == null) {
+        	return btreeCmp;
+        }
+    	if (btreeCmp.getKeyFieldCount() == searchKey.getFieldCount()) {
+            return btreeCmp;
+        }
+        IBinaryComparator[] cmps = new IBinaryComparator[searchKey.getFieldCount()];
+        for (int i = 0; i < searchKey.getFieldCount(); i++) {
+            cmps[i] = btreeCmp.getComparators()[i];
+        }
+        return new MultiComparator(cmps);
+    }
+    
+    public static ITreeIndexFrameFactory getLeafFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory, BTreeLeafFrameType leafType) throws BTreeException {
+        switch(leafType) {
+            case REGULAR_NSM: {
+                return new BTreeNSMLeafFrameFactory(tupleWriterFactory);                
+            }
+            case FIELD_PREFIX_COMPRESSED_NSM: {
+                return new BTreeFieldPrefixNSMLeafFrameFactory(tupleWriterFactory);
+            }
+            default: {
+                throw new BTreeException("Unknown BTreeLeafFrameType: " + leafType.toString());
+            }
+        }
+    }
+    
+    public static MultiComparator createMultiComparator(IBinaryComparatorFactory[] cmpFactories) {
+    	IBinaryComparator[] cmps = new IBinaryComparator[cmpFactories.length];
+    	for (int i = 0; i < cmpFactories.length; i++) {
+    		cmps[i] = cmpFactories[i].createBinaryComparator(); 
+    	}
+    	return new MultiComparator(cmps);
+    }
+}
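
The new BTreeUtils factory wires together the tuple writer, frame factories, and free page manager. A hedged caller-side sketch follows; bufferCache, btreeFileId, typeTraits, cmps, and someTuple are assumed to already exist in the caller's context, and exception handling is omitted.

// Hypothetical usage of BTreeUtils.createBTree(); all inputs are assumptions.
BTree btree = BTreeUtils.createBTree(bufferCache, btreeFileId, typeTraits, cmps,
        BTreeLeafFrameType.FIELD_PREFIX_COMPRESSED_NSM);
btree.create(btreeFileId);   // initialize root and metadata pages
btree.open(btreeFileId);
IIndexOpContext insertCtx = btree.createOpContext(IndexOp.INSERT);
btree.insert(someTuple, insertCtx);
btree.close();
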
diff --git a/hyracks-storage-am-common/.classpath b/hyracks-storage-am-common/.classpath
deleted file mode 100644
index 1f3c1ff..0000000
--- a/hyracks-storage-am-common/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/classes" path="src/main/java"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-storage-am-common/.project b/hyracks-storage-am-common/.project
deleted file mode 100644
index ec47f6b..0000000
--- a/hyracks-storage-am-common/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>hyracks-storage-am-common</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-storage-am-common/.settings/org.eclipse.jdt.core.prefs b/hyracks-storage-am-common/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644
index b37b3bb..0000000
--- a/hyracks-storage-am-common/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,6 +0,0 @@
-#Thu Jul 07 12:23:56 PDT 2011
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
-org.eclipse.jdt.core.compiler.compliance=1.6
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.6
diff --git a/hyracks-storage-am-common/.settings/org.maven.ide.eclipse.prefs b/hyracks-storage-am-common/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index 34ccda1..0000000
--- a/hyracks-storage-am-common/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Mon Aug 15 10:50:16 PDT 2011
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ICursorInitialState.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ICursorInitialState.java
index 7c80be1..60e8ba9 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ICursorInitialState.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ICursorInitialState.java
@@ -19,6 +19,5 @@
 
 public interface ICursorInitialState {
 	public ICachedPage getPage();
-
 	public void setPage(ICachedPage page);
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexOpContext.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexOpContext.java
new file mode 100644
index 0000000..7153f78
--- /dev/null
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexOpContext.java
@@ -0,0 +1,8 @@
+package edu.uci.ics.hyracks.storage.am.common.api;
+
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+
+public interface IIndexOpContext {
+	void reset();
+	void reset(IndexOp newOp);
+}
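
IIndexOpContext only prescribes reset behavior; concrete indexes add their own cached frames and cursors. A minimal hypothetical implementation, purely for illustration (this is not the BTree's actual context class):

import edu.uci.ics.hyracks.storage.am.common.api.IIndexOpContext;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;

// Hypothetical minimal context; real contexts also cache leaf/interior frames.
public class SimpleIndexOpContext implements IIndexOpContext {
    private IndexOp op;

    public SimpleIndexOpContext(IndexOp op) {
        this.op = op;
    }

    @Override
    public void reset() {
        // Clear any per-operation state cached between calls (none here).
    }

    @Override
    public void reset(IndexOp newOp) {
        this.op = newOp;
        reset();
    }

    public IndexOp getIndexOp() {
        return op;
    }
}
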
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISlotManager.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISlotManager.java
index a6102ab..2619493 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISlotManager.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISlotManager.java
@@ -21,12 +21,16 @@
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
 
 public interface ISlotManager {
-	public void setFrame(ITreeIndexFrame frame);
-
 	public int findTupleIndex(ITupleReference searchKey,
 			ITreeIndexTupleReference frameTuple, MultiComparator multiCmp,
 			FindTupleMode mode, FindTupleNoExactMatchPolicy matchPolicy);
 
+	public int getGreatestKeyIndicator();
+	
+	public int getErrorIndicator();
+
+	public void setFrame(ITreeIndexFrame frame);
+	
 	public int insertSlot(int tupleIndex, int tupleOff);
 
 	public int getSlotStartOff();
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndex.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndex.java
index d075285..83d0639 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndex.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndex.java
@@ -1,61 +1,199 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package edu.uci.ics.hyracks.storage.am.common.api;
 
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOpContext;
 
+/**
+ * Interface describing the operations of tree-based index structures. Indexes
+ * implementing this interface can easily reuse the tree index operators for
+ * dataflow. We assume that indexes store tuples with a fixed number of fields.
+ */
 public interface ITreeIndex {
-	// init:
 
-	public void create(int indexFileId, ITreeIndexFrame leafFrame,
-			ITreeIndexMetaDataFrame metaFrame) throws Exception;
+	/**
+	 * Initializes the persistent state of a tree index, e.g., the root page
+	 * and metadata pages.
+	 * 
+	 * @param indexFileId
+	 *            The file id to use for this index.
+	 * @throws HyracksDataException
+	 *             If the BufferCache throws while un/pinning or un/latching.
+	 */
+	public void create(int indexFileId) throws HyracksDataException;
 
+	/**
+	 * Opens the tree index backed by the given file id.
+	 * 
+	 * @param indexFileId
+	 *            The file id backing this index.
+	 */
 	public void open(int indexFileId);
 
-	// operations:
+	/**
+	 * Closes the tree index.
+	 */
+	public void close();
 
-	public void insert(ITupleReference tuple, IndexOpContext ictx)
-			throws Exception;
+	/**
+	 * Creates an operation context for a given index operation
+	 * (insert/delete/update/search/diskorderscan). An operation context
+	 * maintains a cache of objects used during the traversal of the tree index.
+	 * The context is intended to be reused for multiple subsequent operations
+	 * by the same user/thread. An index operation context is stateful, and
+	 * therefore, should not be shared between threads.
+	 * 
+	 * @param indexOp
+	 *            Intended index operation.
+	 * 
+	 * @return An IIndexOpContext for the desired index operation.
+	 */
+	public IIndexOpContext createOpContext(IndexOp op);
+	
+	/**
+	 * Inserts the given tuple into the index using an existing operation
+	 * context.
+	 * 
+	 * @param tuple
+	 *            Tuple to be inserted.
+	 * @param ictx
+	 *            Existing operation context.
+	 * @throws HyracksDataException
+	 *             If the BufferCache throws while un/pinning or un/latching.
+	 * @throws TreeIndexException
+	 *             If an index-specific constraint is violated, e.g., the key
+	 *             already exists.
+	 */
+	public void insert(ITupleReference tuple, IIndexOpContext ictx)
+			throws HyracksDataException, TreeIndexException;
 
-	public void update(ITupleReference tuple, IndexOpContext ictx)
-			throws Exception;
+	/**
+	 * Updates the tuple in the index matching the given tuple with the new
+	 * contents in the given tuple.
+	 * 
+	 * @param tuple
+	 *            Tuple whose match in the index is to be updated with the
+	 *            given tuple's contents.
+	 * @param ictx
+	 *            Existing operation context.
+	 * @throws HyracksDataException
+	 *             If the BufferCache throws while un/pinning or un/latching.
+	 * @throws TreeIndexException
+	 *             If there is no matching tuple in the index.
+	 */
+	public void update(ITupleReference tuple, IIndexOpContext ictx)
+			throws HyracksDataException, TreeIndexException;
 
-	public void delete(ITupleReference tuple, IndexOpContext ictx)
-			throws Exception;
+	/**
+	 * Deletes the tuple in the index matching the given tuple.
+	 * 
+	 * @param tuple
+	 *            Tuple to be deleted.
+	 * @param ictx
+	 *            Existing operation context.
+	 * @throws HyracksDataException
+	 *             If the BufferCache throws while un/pinning or un/latching.
+	 * @throws TreeIndexException
+	 *             If there is no matching tuple in the index.
+	 */
+	public void delete(ITupleReference tuple, IIndexOpContext ictx)
+			throws HyracksDataException, TreeIndexException;
 
-	public IndexOpContext createOpContext(IndexOp op,
-			ITreeIndexFrame leafFrame, ITreeIndexFrame interiorFrame,
-			ITreeIndexMetaDataFrame metaFrame);
+	/**
+	 * Prepares the index for bulk loading, returning a bulk load context. The
+	 * index must be empty for bulk loading to be possible.
+	 * 
+	 * @param fillFactor
+	 *            Desired fill factor in [0, 1.0].
+	 * @throws HyracksDataException
+	 *             If the BufferCache throws while un/pinning or un/latching.
+	 * @throws TreeIndexException
+	 *             If the tree is not empty.
+	 * @return A new context for bulk loading, required for appending tuples.
+	 */
+	public IIndexBulkLoadContext beginBulkLoad(float fillFactor) throws TreeIndexException,
+			HyracksDataException;
 
-	// bulk loading:
+	/**
+	 * Append a tuple to the index in the context of a bulk load.
+	 * 
+	 * @param tuple
+	 *            Tuple to be inserted.
+	 * @param ictx
+	 *            Existing bulk load context.
+	 * @throws HyracksDataException
+	 *             If the BufferCache throws while un/pinning or un/latching.
+	 */
+	public void bulkLoadAddTuple(ITupleReference tuple,
+			IIndexBulkLoadContext ictx) throws HyracksDataException;
 
-	public IIndexBulkLoadContext beginBulkLoad(float fillFactor,
-			ITreeIndexFrame leafFrame, ITreeIndexFrame interiorFrame,
-			ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException;
-
-	public void bulkLoadAddTuple(IIndexBulkLoadContext ictx,
-			ITupleReference tuple) throws HyracksDataException;
-
+	/**
+	 * Finalize the bulk loading operation in the given context.
+	 * 
+	 * @param ictx
+	 *            Existing bulk load context to be finalized.
+	 * @throws HyracksDataException
+	 *             If the BufferCache throws while un/pinning or un/latching.
+	 */
 	public void endBulkLoad(IIndexBulkLoadContext ictx)
 			throws HyracksDataException;
 
-	// search:
-	public void diskOrderScan(ITreeIndexCursor icursor,
-			ITreeIndexFrame leafFrame, ITreeIndexMetaDataFrame metaFrame,
-			IndexOpContext ictx) throws HyracksDataException;
-
-	// utility:
-
-	public IFreePageManager getFreePageManager();
-
-	public int getRootPageId();
-
+	/**
+	 * Open the given cursor for a disk-order scan, positioning the cursor at
+	 * the first leaf tuple.
+	 * 
+	 * @param icursor
+	 *            Cursor to be opened for disk-order scanning.
+	 * @param ictx
+	 *            Existing operation context.
+	 * @throws HyracksDataException
+	 *             If the BufferCache throws while un/pinning or un/latching.
+	 */
+	public void diskOrderScan(ITreeIndexCursor icursor, IIndexOpContext ictx) throws HyracksDataException;
+	
+	/**
+	 * @return The index's leaf frame factory.
+	 */
 	public ITreeIndexFrameFactory getLeafFrameFactory();
 
+	/**
+	 * @return The index's interior frame factory.
+	 */
 	public ITreeIndexFrameFactory getInteriorFrameFactory();
+	
+	/**
+	 * @return The index's free page manager.
+	 */
+	public IFreePageManager getFreePageManager();
 
+	/**
+	 * @return The number of fields that tuples of this index have.
+	 */
 	public int getFieldCount();
+	
+	/**
+	 * @return The current root page id of this index.
+	 */
+	public int getRootPageId();
 
+	/**
+	 * @return An enum of the concrete type of this index.
+	 */
 	public IndexType getIndexType();
 }
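
The Javadoc above implies a per-thread operation context that is created once and reused across operations. A hedged sketch of that calling pattern against any ITreeIndex; treeIndex, the tuple sources, and exception handling are assumed or omitted.

// Reuse one context per thread for repeated operations of the same kind.
IIndexOpContext insertCtx = treeIndex.createOpContext(IndexOp.INSERT);
for (ITupleReference tuple : tuplesToInsert) {   // tuplesToInsert is assumed
    treeIndex.insert(tuple, insertCtx);
}

// Bulk loading requires an empty index and a dedicated bulk-load context.
IIndexBulkLoadContext bulkCtx = treeIndex.beginBulkLoad(0.7f);
for (ITupleReference tuple : sortedTuples) {     // tuples assumed to arrive in key order
    treeIndex.bulkLoadAddTuple(tuple, bulkCtx);
}
treeIndex.endBulkLoad(bulkCtx);
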
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrame.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrame.java
index db458ec..c33a8d8 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrame.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrame.java
@@ -17,94 +17,76 @@
 
 import java.nio.ByteBuffer;
 
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
 import edu.uci.ics.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
 import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
 
 public interface ITreeIndexFrame {
-	public void setPage(ICachedPage page);
-
-	public ICachedPage getPage();
-
-	public ByteBuffer getBuffer();
-
-	public int findTupleIndex(ITupleReference tuple, MultiComparator cmp)
-			throws Exception;
-
-	public void insert(ITupleReference tuple, MultiComparator cmp,
-			int tupleIndex) throws Exception;
-
-	public void update(int rid, ITupleReference tuple) throws Exception;
-
-	public void delete(ITupleReference tuple, MultiComparator cmp,
-			boolean exactDelete) throws Exception;
-
-	// returns true if slots were modified, false otherwise
-	public boolean compact(MultiComparator cmp);
-
-	public boolean compress(MultiComparator cmp) throws HyracksDataException;
 
 	public void initBuffer(byte level);
+	
+    public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple);
+	
+	public void insert(ITupleReference tuple, int tupleIndex);    
+    
+	public FrameOpSpaceStatus hasSpaceUpdate(ITupleReference newTuple, int oldTupleIndex);
+	
+	public void update(ITupleReference newTuple, int oldTupleIndex, boolean inPlace);    
+    
+    public void delete(ITupleReference tuple, int tupleIndex);
 
-	public int getTupleCount();
+    // returns true if slots were modified, false otherwise
+    public boolean compact();
 
-	// assumption: page must be write-latched at this point
-	public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple,
-			MultiComparator cmp);
+    // returns true if compressed.
+    public boolean compress() throws HyracksDataException;
 
-	public FrameOpSpaceStatus hasSpaceUpdate(int rid, ITupleReference tuple,
-			MultiComparator cmp);
+    public int getTupleCount();
 
-	public int getTupleOffset(int slotNum);
+    public int getTupleOffset(int slotNum);
 
-	public int getTotalFreeSpace();
+    public int getTotalFreeSpace();
 
-	public void setPageLsn(int pageLsn);
+    public void setPageLsn(long pageLsn);
 
-	public int getPageLsn();
+    public long getPageLsn();
 
-	// for debugging
-	public void printHeader();
+    public void setPage(ICachedPage page);
 
-	public String printKeys(MultiComparator cmp,
-			ISerializerDeserializer[] fields) throws HyracksDataException;
+    public ICachedPage getPage();
 
-	// TODO; what if tuples more than half-page size?
-	public int split(ITreeIndexFrame rightFrame, ITupleReference tuple,
-			MultiComparator cmp, ISplitKey splitKey) throws Exception;
+    public ByteBuffer getBuffer();
+    
+    // for debugging
+    public String printHeader();
 
-	public ISlotManager getSlotManager();
+    public void split(ITreeIndexFrame rightFrame, ITupleReference tuple, ISplitKey splitKey) throws TreeIndexException;
 
-	// ATTENTION: in b-tree operations it may not always be possible to
-	// determine whether an ICachedPage is a leaf or interior node
-	// a compatible interior and leaf implementation MUST return identical
-	// values when given the same ByteBuffer for the functions below
-	public boolean isLeaf();
+    public ISlotManager getSlotManager();
 
-	public boolean isInterior();
+    // ATTENTION: in b-tree operations it may not always be possible to
+    // determine whether an ICachedPage is a leaf or an interior node;
+    // a compatible interior and leaf implementation MUST return identical
+    // values when given the same ByteBuffer for the functions below.
+    public boolean isLeaf();
 
-	public byte getLevel();
+    public boolean isInterior();
 
-	public void setLevel(byte level);
+    public byte getLevel();
 
-	public boolean getSmFlag(); // structure modification flag
+    public void setLevel(byte level);
 
-	public void setSmFlag(boolean smFlag);
+    public int getSlotSize();
 
-	public int getSlotSize();
+    // for debugging
+    public int getFreeSpaceOff();
 
-	// TODO: should be removed after new tuple format
-	public void setPageTupleFieldCount(int fieldCount);
+    public void setFreeSpaceOff(int freeSpace);
 
-	// for debugging
-	public int getFreeSpaceOff();
+    public ITreeIndexTupleWriter getTupleWriter();
 
-	public void setFreeSpaceOff(int freeSpace);
-
-	public ITreeIndexTupleWriter getTupleWriter();
-
-	public int getPageHeaderSize();
+    public int getPageHeaderSize();
+    
+    public ITreeIndexTupleReference createTupleReference();
 }
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IFrameCompressor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrameCompressor.java
similarity index 72%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IFrameCompressor.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrameCompressor.java
index 8e1d0a2..75ee598 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IFrameCompressor.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrameCompressor.java
@@ -13,11 +13,10 @@
  * limitations under the License.
  */
 
-package edu.uci.ics.hyracks.storage.am.btree.api;
+package edu.uci.ics.hyracks.storage.am.common.api;
 
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
 
-public interface IFrameCompressor {
-    public boolean compress(BTreeFieldPrefixNSMLeafFrame frame, MultiComparator cmp) throws Exception;
+public interface ITreeIndexFrameCompressor {
+    public boolean compress(ITreeIndexFrame frame, MultiComparator cmp) throws Exception;
 }
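
The compressor interface is now generic over ITreeIndexFrame. A trivial hypothetical implementation, just to show the contract (the real field-prefix compressor remains in the btree module):

import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameCompressor;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;

// Hypothetical no-op compressor: reports that nothing was compressed.
public class NoOpFrameCompressor implements ITreeIndexFrameCompressor {
    @Override
    public boolean compress(ITreeIndexFrame frame, MultiComparator cmp) throws Exception {
        return false;
    }
}
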
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrameFactory.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrameFactory.java
index 9ec69d9..32c77be 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrameFactory.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrameFactory.java
@@ -3,5 +3,6 @@
 import java.io.Serializable;
 
 public interface ITreeIndexFrameFactory extends Serializable {
-	public ITreeIndexFrame createFrame();
+    public ITreeIndexFrame createFrame();
+    public ITreeIndexTupleWriterFactory getTupleWriterFactory();
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrame.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrame.java
index 4d81e5e6..17519ae 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrame.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrame.java
@@ -18,7 +18,7 @@
 import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
 
 public interface ITreeIndexMetaDataFrame {
-	public void initBuffer(int level);
+	public void initBuffer(byte level);
 
 	public void setPage(ICachedPage page);
 
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleReference.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleReference.java
index 8b845ac..b989dd9 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleReference.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleReference.java
@@ -20,11 +20,13 @@
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
 
 public interface ITreeIndexTupleReference extends ITupleReference {
-	public void setFieldCount(int fieldCount);
+    public void setFieldCount(int fieldCount);
 
-	public void setFieldCount(int fieldStartIndex, int fieldCount);
+    public void setFieldCount(int fieldStartIndex, int fieldCount);
 
-	public void resetByTupleOffset(ByteBuffer buf, int tupleStartOffset);
+    public void resetByTupleOffset(ByteBuffer buf, int tupleStartOffset);
 
-	public void resetByTupleIndex(ITreeIndexFrame frame, int tupleIndex);
+    public void resetByTupleIndex(ITreeIndexFrame frame, int tupleIndex);
+    
+    public int getTupleSize();
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriter.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriter.java
index 6cd12fb..f0bb7aa 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriter.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriter.java
@@ -20,20 +20,21 @@
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
 
 public interface ITreeIndexTupleWriter {
-	public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf,
-			int targetOff);
+    public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf, int targetOff);
+    
+    public int writeTuple(ITupleReference tuple, byte[] targetBuf, int targetOff);
 
-	public int bytesRequired(ITupleReference tuple);
+    public int bytesRequired(ITupleReference tuple);
 
-	public int writeTupleFields(ITupleReference tuple, int startField,
-			int numFields, ByteBuffer targetBuf, int targetOff);
+    // TODO: change to byte[] as well.
+    public int writeTupleFields(ITupleReference tuple, int startField, int numFields, ByteBuffer targetBuf,
+            int targetOff);
 
-	public int bytesRequired(ITupleReference tuple, int startField,
-			int numFields);
+    public int bytesRequired(ITupleReference tuple, int startField, int numFields);
 
-	// return a tuplereference instance that can read the tuple written by this
-	// writer
-	// the main idea is that the format of the written tuple may not be the same
-	// as the format written by this writer
-	public ITreeIndexTupleReference createTupleReference();
+    // Returns a tuple reference instance that can read the tuple written by this
+    // writer. The main idea is that the on-disk format produced by this writer
+    // may differ from the format of the input tuple, so a matching reader is needed.
+    public ITreeIndexTupleReference createTupleReference();
 }
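
The tuple writer gains a byte[] overload alongside the ByteBuffer one. A hedged round-trip sketch; tupleWriter, fieldCount, pageBuffer, freeSpaceOff, and the input tuple are assumed to exist in the caller's context.

// Write a tuple into a page buffer, then read it back with a matching reference.
int required = tupleWriter.bytesRequired(tuple);        // caller checks free space first
int written = tupleWriter.writeTuple(tuple, pageBuffer, freeSpaceOff);
ITreeIndexTupleReference readBack = tupleWriter.createTupleReference();
readBack.setFieldCount(fieldCount);
readBack.resetByTupleOffset(pageBuffer, freeSpaceOff);
assert written == readBack.getTupleSize();  // the new getTupleSize() reads back the written length
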
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/TreeIndexException.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/TreeIndexException.java
index 48acb06..4cf596f 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/TreeIndexException.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/TreeIndexException.java
@@ -20,7 +20,7 @@
 	private static final long serialVersionUID = 1L;
 	private boolean handled = false;
 
-	public TreeIndexException(Exception e) {
+	public TreeIndexException(Exception e) {		
 		super(e);
 	}
 
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/AbstractTreeIndexOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/AbstractTreeIndexOperatorDescriptor.java
index d7c7ab9..3a0710e 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/AbstractTreeIndexOperatorDescriptor.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/AbstractTreeIndexOperatorDescriptor.java
@@ -21,7 +21,6 @@
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
@@ -35,7 +34,6 @@
 	protected final IFileSplitProvider fileSplitProvider;
 
 	protected final IBinaryComparatorFactory[] comparatorFactories;
-	protected final IPrimitiveValueProviderFactory[] valueProviderFactories;
 
 	protected final ITreeIndexFrameFactory interiorFrameFactory;
 	protected final ITreeIndexFrameFactory leafFrameFactory;
@@ -55,7 +53,6 @@
 			ITreeIndexFrameFactory interiorFrameFactory,
 			ITreeIndexFrameFactory leafFrameFactory, ITypeTrait[] typeTraits,
 			IBinaryComparatorFactory[] comparatorFactories,
-			IPrimitiveValueProviderFactory[] valueProviderFactories,
 			ITreeIndexOpHelperFactory opHelperFactory) {
 		super(spec, inputArity, outputArity);
 		this.fileSplitProvider = fileSplitProvider;
@@ -65,7 +62,6 @@
 		this.leafFrameFactory = leafFrameFactory;
 		this.typeTraits = typeTraits;
 		this.comparatorFactories = comparatorFactories;
-		this.valueProviderFactories = valueProviderFactories;
 		this.opHelperFactory = opHelperFactory;
 		if (outputArity > 0)
 			recordDescriptors[0] = recDesc;
@@ -87,10 +83,10 @@
 	}
 
 	@Override
-	public IPrimitiveValueProviderFactory[] getTreeIndexValueProviderFactories() {
-		return valueProviderFactories;
+	public int getTreeIndexFieldCount() {
+		return typeTraits.length;
 	}
-
+	
 	@Override
 	public ITreeIndexFrameFactory getTreeIndexInteriorFactory() {
 		return interiorFrameFactory;
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/ITreeIndexOperatorDescriptorHelper.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/ITreeIndexOperatorDescriptorHelper.java
index 79ecb37..468c04e 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/ITreeIndexOperatorDescriptorHelper.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/ITreeIndexOperatorDescriptorHelper.java
@@ -5,7 +5,6 @@
 import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
@@ -15,9 +14,10 @@
 
 	public IBinaryComparatorFactory[] getTreeIndexComparatorFactories();
 
-	public IPrimitiveValueProviderFactory[] getTreeIndexValueProviderFactories();
-
+	// TODO: Is this really needed?
 	public ITypeTrait[] getTreeIndexTypeTraits();
+	
+	public int getTreeIndexFieldCount();
 
 	public ITreeIndexFrameFactory getTreeIndexInteriorFactory();
 
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorDescriptor.java
index 5076c56..3f47316 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorDescriptor.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorDescriptor.java
@@ -23,7 +23,6 @@
 import edu.uci.ics.hyracks.api.job.IOperatorEnvironment;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
@@ -42,13 +41,12 @@
 			IFileSplitProvider fileSplitProvider,
 			ITreeIndexFrameFactory interiorFrameFactory,
 			ITreeIndexFrameFactory leafFrameFactory, ITypeTrait[] typeTraits,
-			IBinaryComparatorFactory[] comparatorFactories,
-			IPrimitiveValueProviderFactory[] valueProviderFactories,
+			IBinaryComparatorFactory[] comparatorFactories,			
 			int[] fieldPermutation, float fillFactor,
 			ITreeIndexOpHelperFactory opHelperFactory) {
 		super(spec, 1, 0, null, storageManager, treeIndexRegistryProvider,
 				fileSplitProvider, interiorFrameFactory, leafFrameFactory,
-				typeTraits, comparatorFactories, valueProviderFactories,
+				typeTraits, comparatorFactories,
 				opHelperFactory);
 		this.fieldPermutation = fieldPermutation;
 		this.fillFactor = fillFactor;
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorNodePushable.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorNodePushable.java
index 42dfaab..63edc06 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorNodePushable.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorNodePushable.java
@@ -23,8 +23,6 @@
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
 import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable;
 import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
-import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrame;
 
 public class TreeIndexBulkLoadOperatorNodePushable extends
 		AbstractUnaryInputSinkOperatorNodePushable {
@@ -57,14 +55,11 @@
 				opDesc.getOperatorId(), 0);
 		accessor = new FrameTupleAccessor(treeIndexOpHelper
 				.getHyracksTaskContext().getFrameSize(), recDesc);
-		ITreeIndexMetaDataFrame metaFrame = new LIFOMetaDataFrame();
 		try {
 			treeIndexOpHelper.init();
 			treeIndexOpHelper.getTreeIndex().open(
 					treeIndexOpHelper.getIndexFileId());
-			bulkLoadCtx = treeIndexOpHelper.getTreeIndex().beginBulkLoad(
-					fillFactor, treeIndexOpHelper.getLeafFrame(),
-					treeIndexOpHelper.getInteriorFrame(), metaFrame);
+			bulkLoadCtx = treeIndexOpHelper.getTreeIndex().beginBulkLoad(fillFactor);
 		} catch (Exception e) {
 			// cleanup in case of failure
 			treeIndexOpHelper.deinit();
@@ -78,8 +73,8 @@
 		int tupleCount = accessor.getTupleCount();
 		for (int i = 0; i < tupleCount; i++) {
 			tuple.reset(accessor, i);
-			treeIndexOpHelper.getTreeIndex().bulkLoadAddTuple(bulkLoadCtx,
-					tuple);
+			treeIndexOpHelper.getTreeIndex().bulkLoadAddTuple(tuple,
+					bulkLoadCtx);
 		}
 	}
 
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorDescriptor.java
index 5fcb49a..419c1d3 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorDescriptor.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorDescriptor.java
@@ -41,7 +41,7 @@
 			ITreeIndexOpHelperFactory opHelperFactory) {
 		super(spec, 0, 1, recDesc, storageManager, treeIndexRegistryProvider,
 				fileSplitProvider, interiorFrameFactory, leafFrameFactory,
-				typeTraits, null, null, opHelperFactory);
+				typeTraits, null, opHelperFactory);
 	}
 
 	@Override
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorNodePushable.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorNodePushable.java
index 0b22148..ab7b335 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorNodePushable.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorNodePushable.java
@@ -24,12 +24,10 @@
 import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
 import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexOpContext;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
-import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrame;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOpContext;
 
 public class TreeIndexDiskOrderScanOperatorNodePushable extends
 		AbstractUnaryOutputSourceOperatorNodePushable {
@@ -50,18 +48,14 @@
 				.getTreeIndexLeafFactory().createFrame();
 		ITreeIndexCursor cursor = treeIndexOpHelper
 				.createDiskOrderScanCursor(cursorFrame);
-		ITreeIndexMetaDataFrame metaFrame = new LIFOMetaDataFrame();
-
-		IndexOpContext diskOrderScanOpCtx = treeIndexOpHelper
-				.getTreeIndex()
-				.createOpContext(IndexOp.DISKORDERSCAN, cursorFrame, null, null);
+		IIndexOpContext diskOrderScanOpCtx = treeIndexOpHelper.getTreeIndex()
+				.createOpContext(IndexOp.DISKORDERSCAN);
 		try {
 
 			treeIndexOpHelper.init();
 			writer.open();
 			try {
-				treeIndexOpHelper.getTreeIndex().diskOrderScan(cursor,
-						cursorFrame, metaFrame, diskOrderScanOpCtx);
+				treeIndexOpHelper.getTreeIndex().diskOrderScan(cursor, diskOrderScanOpCtx);
 
 				int fieldCount = treeIndexOpHelper.getTreeIndex()
 						.getFieldCount();
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexFileEnlistmentOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexFileEnlistmentOperatorDescriptor.java
index 60fa8cc..2a59433 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexFileEnlistmentOperatorDescriptor.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexFileEnlistmentOperatorDescriptor.java
@@ -25,7 +25,6 @@
 import edu.uci.ics.hyracks.api.job.IOperatorEnvironment;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
@@ -47,12 +46,10 @@
 			ITreeIndexFrameFactory interiorFrameFactory,
 			ITreeIndexFrameFactory leafFrameFactory, ITypeTrait[] typeTraits,
 			IBinaryComparatorFactory[] comparatorFactories,
-			IPrimitiveValueProviderFactory[] valueProviderFactories,
 			ITreeIndexOpHelperFactory opHelperFactory) {
 		super(spec, 0, 0, recDesc, storageManager, treeIndexRegistryProvider,
 				fileSplitProvider, interiorFrameFactory, leafFrameFactory,
-				typeTraits, comparatorFactories, valueProviderFactories,
-				opHelperFactory);
+				typeTraits, comparatorFactories, opHelperFactory);
 	}
 
 	@Override
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorDescriptor.java
index b41bd98..003993d 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorDescriptor.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorDescriptor.java
@@ -24,7 +24,6 @@
 import edu.uci.ics.hyracks.api.job.IOperatorEnvironment;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
@@ -46,12 +45,11 @@
 			ITreeIndexFrameFactory interiorFrameFactory,
 			ITreeIndexFrameFactory leafFrameFactory, ITypeTrait[] typeTraits,
 			IBinaryComparatorFactory[] comparatorFactories,
-			IPrimitiveValueProviderFactory[] valueProviderFactories,
 			int[] fieldPermutation, IndexOp op,
 			ITreeIndexOpHelperFactory opHelperFactory) {
 		super(spec, 1, 1, recDesc, storageManager, treeIndexRegistryProvider,
 				fileSplitProvider, interiorFrameFactory, leafFrameFactory,
-				typeTraits, comparatorFactories, valueProviderFactories,
+				typeTraits, comparatorFactories,
 				opHelperFactory);
 		this.fieldPermutation = fieldPermutation;
 		this.op = op;
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorNodePushable.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorNodePushable.java
index cc49998..0ae513b 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorNodePushable.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorNodePushable.java
@@ -23,10 +23,9 @@
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
 import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
 import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexOpContext;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrame;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOpContext;
 
 public class TreeIndexInsertUpdateDeleteOperatorNodePushable extends
 		AbstractUnaryInputUnaryOutputOperatorNodePushable {
@@ -36,7 +35,7 @@
 	private final IndexOp op;
 	private final PermutingFrameTupleReference tuple = new PermutingFrameTupleReference();
 	private ByteBuffer writeBuffer;
-	private IndexOpContext opCtx;
+	private IIndexOpContext opCtx;
 
 	public TreeIndexInsertUpdateDeleteOperatorNodePushable(
 			AbstractTreeIndexOperatorDescriptor opDesc,
@@ -64,10 +63,7 @@
 			treeIndexOpHelper.init();
 			treeIndexOpHelper.getTreeIndex().open(
 					treeIndexOpHelper.getIndexFileId());
-			opCtx = treeIndexOpHelper.getTreeIndex().createOpContext(op,
-					treeIndexOpHelper.getLeafFrame(),
-					treeIndexOpHelper.getInteriorFrame(),
-					new LIFOMetaDataFrame());
+			opCtx = treeIndexOpHelper.getTreeIndex().createOpContext(op);
 		} catch (Exception e) {
 			// cleanup in case of failure
 			treeIndexOpHelper.deinit();
@@ -88,23 +84,28 @@
 
 				case INSERT: {
 					treeIndex.insert(tuple, opCtx);
-				}
 					break;
+				}
 
+				case UPDATE: {
+					treeIndex.update(tuple, opCtx);
+					break;
+				}
+				
 				case DELETE: {
 					treeIndex.delete(tuple, opCtx);
-				}
 					break;
-
+				}
+					
 				default: {
 					throw new HyracksDataException("Unsupported operation "
 							+ op + " in tree index InsertUpdateDelete operator");
 				}
 
 				}
-
+			} catch (HyracksDataException e) {
+				throw e;
 			} catch (Exception e) {
-				e.printStackTrace();
 				throw new HyracksDataException(e);
 			}
 		}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexOpHelper.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexOpHelper.java
index f9fd77d..6fea7e5 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexOpHelper.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexOpHelper.java
@@ -22,7 +22,6 @@
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
 import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
 import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
@@ -30,170 +29,116 @@
 
 public abstract class TreeIndexOpHelper {
 
-	protected ITreeIndexFrame interiorFrame;
-	protected ITreeIndexFrame leafFrame;
-	protected MultiComparator cmp;
+    protected ITreeIndexFrame interiorFrame;
+    protected ITreeIndexFrame leafFrame;
+    protected MultiComparator cmp;
 
-	protected ITreeIndex treeIndex;
-	protected int indexFileId = -1;
-	protected int partition;
+    protected ITreeIndex treeIndex;
+    protected int indexFileId = -1;
+    protected int partition;
 
-	protected ITreeIndexOperatorDescriptorHelper opDesc;
-	protected IHyracksTaskContext ctx;
+    protected ITreeIndexOperatorDescriptorHelper opDesc;
+    protected IHyracksTaskContext ctx;
 
-	protected IndexHelperOpenMode mode;
+    protected IndexHelperOpenMode mode;
 
-	public TreeIndexOpHelper(ITreeIndexOperatorDescriptorHelper opDesc,
-			final IHyracksTaskContext ctx, int partition,
-			IndexHelperOpenMode mode) {
-		this.opDesc = opDesc;
-		this.ctx = ctx;
-		this.mode = mode;
-		this.partition = partition;
-	}
+    public TreeIndexOpHelper(ITreeIndexOperatorDescriptorHelper opDesc, final IHyracksTaskContext ctx,
+            int partition, IndexHelperOpenMode mode) {
+        this.opDesc = opDesc;
+        this.ctx = ctx;
+        this.mode = mode;
+        this.partition = partition;
+    }
 
-	public void init() throws HyracksDataException {
-		IBufferCache bufferCache = opDesc.getStorageManager().getBufferCache(
-				ctx);
-		IFileMapProvider fileMapProvider = opDesc.getStorageManager()
-				.getFileMapProvider(ctx);
-		IFileSplitProvider fileSplitProvider = opDesc
-				.getTreeIndexFileSplitProvider();
+    public void init() throws HyracksDataException {
+        IBufferCache bufferCache = opDesc.getStorageManager().getBufferCache(ctx);
+        IFileMapProvider fileMapProvider = opDesc.getStorageManager().getFileMapProvider(ctx);
+        IFileSplitProvider fileSplitProvider = opDesc.getTreeIndexFileSplitProvider();
 
-		FileReference f = fileSplitProvider.getFileSplits()[partition]
-				.getLocalFile();
-		boolean fileIsMapped = fileMapProvider.isMapped(f);
+        FileReference f = fileSplitProvider.getFileSplits()[partition].getLocalFile();
+        boolean fileIsMapped = fileMapProvider.isMapped(f);
+        if (!fileIsMapped) {
+            bufferCache.createFile(f);
+        }
+        int fileId = fileMapProvider.lookupFileId(f);
+        try {
+            bufferCache.openFile(fileId);
+        } catch (HyracksDataException e) {
+            // Revert state of buffer cache since file failed to open.
+            if (!fileIsMapped) {
+                bufferCache.deleteFile(fileId);
+            }
+            throw e;
+        }
 
-		switch (mode) {
+        // Only set indexFileId member when openFile() succeeds,
+        // otherwise deinit() will try to close the file that failed to open
+        indexFileId = fileId;
 
-		case OPEN: {
-			if (!fileIsMapped) {
-				throw new HyracksDataException(
-						"Trying to open tree index from unmapped file "
-								+ f.toString());
-			}
-		}
-			break;
+        IndexRegistry<ITreeIndex> treeIndexRegistry = opDesc.getTreeIndexRegistryProvider().getRegistry(ctx);
+        // Get the tree from the registry, creating and registering it if necessary.
+        treeIndexRegistry.lock();
+        try {
+            // Check if tree has already been registered by another thread.
+            treeIndex = treeIndexRegistry.get(indexFileId);
+            if (treeIndex != null) {
+                return;
+            }
+            IBinaryComparator[] comparators = new IBinaryComparator[opDesc.getTreeIndexComparatorFactories().length];
+            for (int i = 0; i < opDesc.getTreeIndexComparatorFactories().length; i++) {
+                comparators[i] = opDesc.getTreeIndexComparatorFactories()[i].createBinaryComparator();
+            }
+            cmp = new MultiComparator(comparators);
+            treeIndex = createTreeIndex();
+            if (mode == IndexHelperOpenMode.CREATE) {
+                try {
+                    treeIndex.create(indexFileId);
+                } catch (Exception e) {
+                    throw new HyracksDataException(e);
+                }
+            }
+            treeIndex.open(indexFileId);
+            treeIndexRegistry.register(indexFileId, treeIndex);
+        } finally {
+            treeIndexRegistry.unlock();
+        }
+    }
 
-		case CREATE:
-		case ENLIST: {
-			if (!fileIsMapped) {
-				bufferCache.createFile(f);
-			}
-		}
-			break;
+    // MUST be overridden
+    public ITreeIndex createTreeIndex() throws HyracksDataException {
+        throw new HyracksDataException("createTreeIndex Operation not implemented.");
+    }
 
-		}
+    // MUST be overridden
+    public MultiComparator createMultiComparator(IBinaryComparator[] comparators) throws HyracksDataException {
+        throw new HyracksDataException("createMultiComparator Operation not implemented.");
+    }
 
-		int fileId = fileMapProvider.lookupFileId(f);
-		try {
-			bufferCache.openFile(fileId);
-		} catch (HyracksDataException e) {
-			// revert state of buffer cache since file failed to open
-			if (!fileIsMapped) {
-				bufferCache.deleteFile(fileId);
-			}
-			throw e;
-		}
+    public ITreeIndexCursor createDiskOrderScanCursor(ITreeIndexFrame leafFrame) throws HyracksDataException {
+        return new TreeDiskOrderScanCursor(leafFrame);
+    }
 
-		// only set indexFileId member when openFile() succeeds,
-		// otherwise deinit() will try to close the file that failed to open
-		indexFileId = fileId;
+    public void deinit() throws HyracksDataException {
+        if (indexFileId != -1) {
+            IBufferCache bufferCache = opDesc.getStorageManager().getBufferCache(ctx);
+            bufferCache.closeFile(indexFileId);
+        }
+    }
 
-		interiorFrame = opDesc.getTreeIndexInteriorFactory().createFrame();
-		leafFrame = opDesc.getTreeIndexLeafFactory().createFrame();
+    public ITreeIndex getTreeIndex() {
+        return treeIndex;
+    }
 
-		IndexRegistry<ITreeIndex> treeIndexRegistry = opDesc
-				.getTreeIndexRegistryProvider().getRegistry(ctx);
-		treeIndex = treeIndexRegistry.get(indexFileId);
-		if (treeIndex == null) {
+    public IHyracksTaskContext getHyracksTaskContext() {
+        return ctx;
+    }
 
-			// create new tree and register it
-			treeIndexRegistry.lock();
-			try {
-				// check if tree has already been registered by another thread
-				treeIndex = treeIndexRegistry.get(indexFileId);
-				if (treeIndex == null) {
-					// this thread should create and register the tree
+    public ITreeIndexOperatorDescriptorHelper getOperatorDescriptor() {
+        return opDesc;
+    }
 
-					IBinaryComparator[] comparators = new IBinaryComparator[opDesc
-							.getTreeIndexComparatorFactories().length];
-					for (int i = 0; i < opDesc
-							.getTreeIndexComparatorFactories().length; i++) {
-						comparators[i] = opDesc
-								.getTreeIndexComparatorFactories()[i]
-								.createBinaryComparator();
-					}
-
-					cmp = createMultiComparator(comparators);
-
-					treeIndex = createTreeIndex();
-					if (mode == IndexHelperOpenMode.CREATE) {
-						ITreeIndexMetaDataFrame metaFrame = treeIndex
-								.getFreePageManager().getMetaDataFrameFactory()
-								.createFrame();
-						try {
-							treeIndex.create(indexFileId, leafFrame, metaFrame);
-						} catch (Exception e) {
-							throw new HyracksDataException(e);
-						}
-					}
-					treeIndex.open(indexFileId);
-					treeIndexRegistry.register(indexFileId, treeIndex);
-				}
-			} finally {
-				treeIndexRegistry.unlock();
-			}
-		}
-	}
-
-	// MUST be overridden
-	public ITreeIndex createTreeIndex() throws HyracksDataException {
-		throw new HyracksDataException(
-				"createTreeIndex Operation not implemented.");
-	}
-
-	// MUST be overridden
-	public MultiComparator createMultiComparator(IBinaryComparator[] comparators)
-			throws HyracksDataException {
-		throw new HyracksDataException(
-				"createComparator Operation not implemented.");
-	}
-
-	public ITreeIndexCursor createDiskOrderScanCursor(ITreeIndexFrame leafFrame)
-			throws HyracksDataException {
-		return new TreeDiskOrderScanCursor(leafFrame);
-	}
-
-	public void deinit() throws HyracksDataException {
-		if (indexFileId != -1) {
-			IBufferCache bufferCache = opDesc.getStorageManager()
-					.getBufferCache(ctx);
-			bufferCache.closeFile(indexFileId);
-		}
-	}
-
-	public ITreeIndex getTreeIndex() {
-		return treeIndex;
-	}
-
-	public IHyracksTaskContext getHyracksTaskContext() {
-		return ctx;
-	}
-
-	public ITreeIndexOperatorDescriptorHelper getOperatorDescriptor() {
-		return opDesc;
-	}
-
-	public ITreeIndexFrame getLeafFrame() {
-		return leafFrame;
-	}
-
-	public ITreeIndexFrame getInteriorFrame() {
-		return interiorFrame;
-	}
-
-	public int getIndexFileId() {
-		return indexFileId;
-	}
+    public int getIndexFileId() {
+        return indexFileId;
+    }
 }
\ No newline at end of file
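Concrete access methods are expected to subclass TreeIndexOpHelper and override the factory methods above, since the base versions only throw. A minimal sketch of such a subclass, assumed to live in the same dataflow package (the IHyracksTaskContext import path is also an assumption); the ITreeIndex instance passed to the constructor stands in for whatever concrete index a real helper would build in createTreeIndex():

    package edu.uci.ics.hyracks.storage.am.common.dataflow;

    import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
    import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
    import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
    import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
    import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;

    // Hypothetical helper; a real one (e.g. for the B-tree) constructs its own index type.
    public class ExampleTreeIndexOpHelper extends TreeIndexOpHelper {

        private final ITreeIndex concreteTreeIndex;

        public ExampleTreeIndexOpHelper(ITreeIndex concreteTreeIndex, ITreeIndexOperatorDescriptorHelper opDesc,
                IHyracksTaskContext ctx, int partition, IndexHelperOpenMode mode) {
            super(opDesc, ctx, partition, mode);
            this.concreteTreeIndex = concreteTreeIndex;
        }

        @Override
        public ITreeIndex createTreeIndex() throws HyracksDataException {
            // A real helper would construct its specific ITreeIndex here.
            return concreteTreeIndex;
        }

        @Override
        public MultiComparator createMultiComparator(IBinaryComparator[] comparators) throws HyracksDataException {
            return new MultiComparator(comparators);
        }
    }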
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorDescriptor.java
index d1a1614..a91ca66 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorDescriptor.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorDescriptor.java
@@ -27,7 +27,7 @@
 			ITreeIndexOpHelperFactory opHelperFactory) {
 		super(spec, 0, 0, null, storageManager, treeIndexRegistryProvider,
 				fileSplitProvider, interiorFrameFactory, leafFrameFactory,
-				typeTraits, comparatorFactories, null, opHelperFactory);
+				typeTraits, comparatorFactories, opHelperFactory);
 	}
 
 	@Override
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorNodePushable.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorNodePushable.java
index 4b53453..82b1b38 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorNodePushable.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorNodePushable.java
@@ -20,8 +20,8 @@
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.dataflow.std.base.AbstractOperatorNodePushable;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.utility.TreeIndexStats;
-import edu.uci.ics.hyracks.storage.am.common.utility.TreeIndexStatsGatherer;
+import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexStats;
+import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexStatsGatherer;
 import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
 
 public class TreeIndexStatsOperatorNodePushable extends
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/AbstractSlotManager.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/AbstractSlotManager.java
index bce2e19..a1a38ab 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/AbstractSlotManager.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/AbstractSlotManager.java
@@ -19,7 +19,10 @@
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
 
 public abstract class AbstractSlotManager implements ISlotManager {
-
+
+	protected final int GREATEST_KEY_INDICATOR = -1;
+	protected final int ERROR_INDICATOR = -2;
+
 	protected static final int slotSize = 4;
 	protected ITreeIndexFrame frame;
 
@@ -58,4 +61,14 @@
 	public int getSlotOff(int tupleIndex) {
 		return getSlotStartOff() - tupleIndex * slotSize;
 	}
+
+	@Override
+	public int getGreatestKeyIndicator() {
+		return GREATEST_KEY_INDICATOR;
+	}
+
+	@Override
+	public int getErrorIndicator() {
+		return ERROR_INDICATOR;
+	}
 }
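The two new getters expose the -1 and -2 sentinels symbolically, so callers can test a returned tuple index without hard-coding the literals, and getSlotOff() above lays slots out downward from getSlotStartOff() in 4-byte steps. A standalone sketch of that offset arithmetic, using a hypothetical slotStartOff for a 4 KB page:

    public class SlotOffsetSketch {
        public static void main(String[] args) {
            final int slotSize = 4;        // AbstractSlotManager.slotSize
            final int slotStartOff = 4092; // hypothetical: slot 0 kept in the last 4 bytes of a 4096-byte page
            // getSlotOff(tupleIndex) == getSlotStartOff() - tupleIndex * slotSize,
            // so slots grow downward toward the tuple data.
            for (int tupleIndex = 0; tupleIndex < 3; tupleIndex++) {
                System.out.println("slot " + tupleIndex + " at offset " + (slotStartOff - tupleIndex * slotSize));
            }
            // Sentinels exposed by the new getters: getGreatestKeyIndicator() == -1, getErrorIndicator() == -2.
        }
    }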
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/FrameOpSpaceStatus.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/FrameOpSpaceStatus.java
index 7114875..da9c815 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/FrameOpSpaceStatus.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/FrameOpSpaceStatus.java
@@ -16,5 +16,5 @@
 package edu.uci.ics.hyracks.storage.am.common.frames;
 
 public enum FrameOpSpaceStatus {
-	INSUFFICIENT_SPACE, SUFFICIENT_CONTIGUOUS_SPACE, SUFFICIENT_SPACE
+    INSUFFICIENT_SPACE, SUFFICIENT_CONTIGUOUS_SPACE, SUFFICIENT_SPACE, SUFFICIENT_INPLACE_SPACE
 }
\ No newline at end of file
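SUFFICIENT_INPLACE_SPACE is new; it signals that an updated tuple fits within the bytes occupied by the old one. A sketch of how a caller might branch on the status for an update, assuming the ITreeIndexFrame methods that TreeIndexNSMFrame overrides later in this patch (page splitting and space reclamation are elided):

    import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
    import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;

    public class FrameUpdateSketch {
        // Returns true if the update was applied on this page; false if the caller must
        // first reclaim space (delete the old tuple plus compaction) or split the page.
        static boolean tryUpdate(ITreeIndexFrame frame, ITupleReference newTuple, int oldTupleIndex) {
            switch (frame.hasSpaceUpdate(newTuple, oldTupleIndex)) {
                case SUFFICIENT_INPLACE_SPACE:
                    // New tuple is no larger than the old one: overwrite it in place.
                    frame.update(newTuple, oldTupleIndex, true);
                    return true;
                case SUFFICIENT_CONTIGUOUS_SPACE:
                    // New tuple fits in the contiguous free space: rewrite it there and repoint the slot.
                    frame.update(newTuple, oldTupleIndex, false);
                    return true;
                case SUFFICIENT_SPACE:
                    // Fits only once the old tuple's bytes are reclaimed; left to the access method.
                    return false;
                default:
                    // INSUFFICIENT_SPACE: the page must be split or otherwise restructured.
                    return false;
            }
        }
    }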
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/LIFOMetaDataFrame.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/LIFOMetaDataFrame.java
index b0ec13d..c87b84a 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/LIFOMetaDataFrame.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/LIFOMetaDataFrame.java
@@ -27,12 +27,11 @@
 
 public class LIFOMetaDataFrame implements ITreeIndexMetaDataFrame {
 
-	protected static final int tupleCountOff = 0;
-	protected static final int freeSpaceOff = tupleCountOff + 4;
-	protected static final int maxPageOff = freeSpaceOff + 4;
-	protected static final int dummyFieldOff = maxPageOff + 4;
-	protected static final byte levelOff = dummyFieldOff + 4;
-	protected static final byte nextPageOff = levelOff + 1;
+	protected static final int tupleCountOff = 0; //0
+	protected static final int freeSpaceOff = tupleCountOff + 4; //4
+	protected static final int maxPageOff = freeSpaceOff + 4; //8
+	protected static final int levelOff = maxPageOff + 12; //20
+	protected static final int nextPageOff = levelOff + 1; // 21
 
 	protected ICachedPage page = null;
 	protected ByteBuffer buf = null;
@@ -96,10 +95,11 @@
 	}
 
 	@Override
-	public void initBuffer(int level) {
-		buf.putInt(freeSpaceOff, nextPageOff + 4);
+	public void initBuffer(byte level) {
 		buf.putInt(tupleCountOff, 0);
-		buf.putInt(levelOff, level);
+		buf.putInt(freeSpaceOff, nextPageOff + 4);
+		//buf.putInt(maxPageOff, -1);
+		buf.put(levelOff, level);
 		buf.putInt(nextPageOff, -1);
 	}
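With the constants above, the fields sit at offsets 0 (tuple count), 4 (free-space pointer), 8 (max page, with 12 bytes set aside before the next field), 20 (level, one byte) and 21 (next page, four bytes), so initBuffer() starts the free space at offset 25. Restated as plain arithmetic, adding no new behavior:

    public class MetaFrameLayoutSketch {
        public static void main(String[] args) {
            int tupleCountOff = 0;                   // 4-byte tuple count
            int freeSpaceOff = tupleCountOff + 4;    // 4: free-space pointer
            int maxPageOff = freeSpaceOff + 4;       // 8: max page (12 bytes reserved from here)
            int levelOff = maxPageOff + 12;          // 20: level, a single byte
            int nextPageOff = levelOff + 1;          // 21: 4-byte next-page pointer
            int initialFreeSpace = nextPageOff + 4;  // 25: the value initBuffer() writes at freeSpaceOff
            System.out.println(initialFreeSpace);
        }
    }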
 
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/TreeIndexNSMFrame.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/TreeIndexNSMFrame.java
index e5c37ff..e2e28fd 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/TreeIndexNSMFrame.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/TreeIndexNSMFrame.java
@@ -15,332 +15,281 @@
 
 package edu.uci.ics.hyracks.storage.am.common.frames;
 
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Collections;
 
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
 import edu.uci.ics.hyracks.storage.am.common.api.ISlotManager;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.SlotOffTupleOff;
 import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
 
 public abstract class TreeIndexNSMFrame implements ITreeIndexFrame {
 
-	protected static final int pageLsnOff = 0; // 0
-	protected static final int tupleCountOff = pageLsnOff + 4; // 4
-	protected static final int freeSpaceOff = tupleCountOff + 4; // 8
-	protected static final int totalFreeSpaceOff = freeSpaceOff + 4; // 16
-	protected static final byte levelOff = totalFreeSpaceOff + 4;
-	protected static final byte smFlagOff = levelOff + 1;
+    protected static final int pageLsnOff = 0; // 0
+    protected static final int tupleCountOff = pageLsnOff + 8; // 8
+    protected static final int freeSpaceOff = tupleCountOff + 4; // 12
+    protected static final int totalFreeSpaceOff = freeSpaceOff + 4; // 16
+    protected static final int levelOff = totalFreeSpaceOff + 4; // 20
+    protected static final int smFlagOff = levelOff + 1; // 21
 
-	protected ICachedPage page = null;
-	protected ByteBuffer buf = null;
-	protected ISlotManager slotManager;
+    protected ICachedPage page = null;
+    protected ByteBuffer buf = null;
+    protected ISlotManager slotManager;
 
-	protected ITreeIndexTupleWriter tupleWriter;
-	protected ITreeIndexTupleReference frameTuple;
+    protected ITreeIndexTupleWriter tupleWriter;
+    protected ITreeIndexTupleReference frameTuple;
 
-	public TreeIndexNSMFrame(ITreeIndexTupleWriter tupleWriter,
-			ISlotManager slotManager) {
-		this.tupleWriter = tupleWriter;
-		this.frameTuple = tupleWriter.createTupleReference();
-		this.slotManager = slotManager;
-	}
+    public TreeIndexNSMFrame(ITreeIndexTupleWriter tupleWriter, ISlotManager slotManager) {
+        this.tupleWriter = tupleWriter;
+        this.frameTuple = tupleWriter.createTupleReference();
+        this.slotManager = slotManager;
+        this.slotManager.setFrame(this);
+    }
 
-	@Override
-	public void initBuffer(byte level) {
-		buf.putInt(pageLsnOff, 0); // TODO: might to set to a different lsn
-		// during creation
-		buf.putInt(tupleCountOff, 0);
-		resetSpaceParams();
-		buf.put(levelOff, level);
-		buf.put(smFlagOff, (byte) 0);
-	}
+    @Override
+    public void initBuffer(byte level) {
+        buf.putLong(pageLsnOff, 0); // TODO: might need to be set to a different LSN during creation.
+        buf.putInt(tupleCountOff, 0);
+        resetSpaceParams();
+        buf.put(levelOff, level);
+        buf.put(smFlagOff, (byte) 0);
+    }
 
-	@Override
-	public boolean isLeaf() {
-		return buf.get(levelOff) == 0;
-	}
+    @Override
+    public boolean isLeaf() {
+        return buf.get(levelOff) == 0;
+    }
 
-	@Override
-	public boolean isInterior() {
-		return buf.get(levelOff) > 0;
-	}
+    @Override
+    public boolean isInterior() {
+        return buf.get(levelOff) > 0;
+    }
 
-	@Override
-	public byte getLevel() {
-		return buf.get(levelOff);
-	}
+    @Override
+    public byte getLevel() {
+        return buf.get(levelOff);
+    }
 
-	@Override
-	public void setLevel(byte level) {
-		buf.put(levelOff, level);
-	}
+    @Override
+    public void setLevel(byte level) {
+        buf.put(levelOff, level);
+    }
 
-	@Override
-	public boolean getSmFlag() {
-		return buf.get(smFlagOff) != 0;
-	}
+    @Override
+    public int getFreeSpaceOff() {
+        return buf.getInt(freeSpaceOff);
+    }
 
-	@Override
-	public void setSmFlag(boolean smFlag) {
-		if (smFlag)
-			buf.put(smFlagOff, (byte) 1);
-		else
-			buf.put(smFlagOff, (byte) 0);
-	}
+    @Override
+    public void setFreeSpaceOff(int freeSpace) {
+        buf.putInt(freeSpaceOff, freeSpace);
+    }
 
-	@Override
-	public int getFreeSpaceOff() {
-		return buf.getInt(freeSpaceOff);
-	}
+    @Override
+    public void setPage(ICachedPage page) {
+        this.page = page;
+        this.buf = page.getBuffer();
+    }
 
-	@Override
-	public void setFreeSpaceOff(int freeSpace) {
-		buf.putInt(freeSpaceOff, freeSpace);
-	}
+    @Override
+    public ByteBuffer getBuffer() {
+        return page.getBuffer();
+    }
 
-	@Override
-	public void setPage(ICachedPage page) {
-		this.page = page;
-		this.buf = page.getBuffer();
-		slotManager.setFrame(this);
-	}
+    @Override
+    public ICachedPage getPage() {
+        return page;
+    }
 
-	@Override
-	public ByteBuffer getBuffer() {
-		return page.getBuffer();
-	}
+    @Override
+    public boolean compact() {
+        resetSpaceParams();
+        int tupleCount = buf.getInt(tupleCountOff);
+        int freeSpace = buf.getInt(freeSpaceOff);
+        // Sort the slots by the tuple offset they point to.
+        ArrayList<SlotOffTupleOff> sortedTupleOffs = new ArrayList<SlotOffTupleOff>();
+        sortedTupleOffs.ensureCapacity(tupleCount);
+        for (int i = 0; i < tupleCount; i++) {
+            int slotOff = slotManager.getSlotOff(i);
+            int tupleOff = slotManager.getTupleOff(slotOff);
+            sortedTupleOffs.add(new SlotOffTupleOff(i, slotOff, tupleOff));
+        }
+        Collections.sort(sortedTupleOffs);
+        // Iterate over the sorted slots, and move their corresponding tuples to
+        // the left, reclaiming free space.
+        for (int i = 0; i < sortedTupleOffs.size(); i++) {
+            int tupleOff = sortedTupleOffs.get(i).tupleOff;
+            frameTuple.resetByTupleOffset(buf, tupleOff);
+            int tupleEndOff = frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
+                    + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1);
+            int tupleLength = tupleEndOff - tupleOff;
+            System.arraycopy(buf.array(), tupleOff, buf.array(), freeSpace, tupleLength);
+            slotManager.setSlot(sortedTupleOffs.get(i).slotOff, freeSpace);
+            freeSpace += tupleLength;
+        }
+        // Update contiguous free space pointer and total free space indicator.
+        buf.putInt(freeSpaceOff, freeSpace);
+        buf.putInt(totalFreeSpaceOff, buf.capacity() - freeSpace - tupleCount * slotManager.getSlotSize());
+        return false;
+    }
 
-	@Override
-	public ICachedPage getPage() {
-		return page;
-	}
+    @Override
+    public void delete(ITupleReference tuple, int tupleIndex) {
+        int slotOff = slotManager.getSlotOff(tupleIndex);
+        int tupleOff = slotManager.getTupleOff(slotOff);
+        frameTuple.resetByTupleOffset(buf, tupleOff);
+        int tupleSize = tupleWriter.bytesRequired(frameTuple);
 
-	@Override
-	public boolean compact(MultiComparator cmp) {
-		resetSpaceParams();
-		frameTuple.setFieldCount(cmp.getFieldCount());
+        // perform deletion (we just do a memcpy to overwrite the slot)
+        int slotStartOff = slotManager.getSlotEndOff();
+        int length = slotOff - slotStartOff;
+        System.arraycopy(buf.array(), slotStartOff, buf.array(), slotStartOff + slotManager.getSlotSize(), length);
 
-		int tupleCount = buf.getInt(tupleCountOff);
-		int freeSpace = buf.getInt(freeSpaceOff);
+        // maintain space information
+        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
+        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + tupleSize + slotManager.getSlotSize());
+    }
 
-		ArrayList<SlotOffTupleOff> sortedTupleOffs = new ArrayList<SlotOffTupleOff>();
-		sortedTupleOffs.ensureCapacity(tupleCount);
-		for (int i = 0; i < tupleCount; i++) {
-			int slotOff = slotManager.getSlotOff(i);
-			int tupleOff = slotManager.getTupleOff(slotOff);
-			sortedTupleOffs.add(new SlotOffTupleOff(i, slotOff, tupleOff));
-		}
-		Collections.sort(sortedTupleOffs);
+    @Override
+    public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple) {
+        int bytesRequired = tupleWriter.bytesRequired(tuple);
+        // Enough space in the contiguous space region?
+        if (bytesRequired + slotManager.getSlotSize() <= buf.capacity() - buf.getInt(freeSpaceOff) 
+                - (buf.getInt(tupleCountOff) * slotManager.getSlotSize())) {
+            return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
+        }
+        // Enough space after compaction?
+        if (bytesRequired + slotManager.getSlotSize() <= buf.getInt(totalFreeSpaceOff)) {
+            return FrameOpSpaceStatus.SUFFICIENT_SPACE;
+        }
+        return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
+    }
 
-		for (int i = 0; i < sortedTupleOffs.size(); i++) {
-			int tupleOff = sortedTupleOffs.get(i).tupleOff;
-			frameTuple.resetByTupleOffset(buf, tupleOff);
+    @Override
+    public FrameOpSpaceStatus hasSpaceUpdate(ITupleReference newTuple, int oldTupleIndex) {
+    	frameTuple.resetByTupleIndex(this, oldTupleIndex);
+    	int oldTupleBytes = frameTuple.getTupleSize();
+    	int newTupleBytes = tupleWriter.bytesRequired(newTuple);
+    	int additionalBytesRequired = newTupleBytes - oldTupleBytes;
+    	// Enough space for an in-place update?
+    	if (additionalBytesRequired <= 0) {
+    		return FrameOpSpaceStatus.SUFFICIENT_INPLACE_SPACE;
+    	}
+    	// Enough space if we delete the old tuple and insert the new one without compaction? 
+    	if (newTupleBytes <= buf.capacity() - buf.getInt(freeSpaceOff)
+                - (buf.getInt(tupleCountOff) * slotManager.getSlotSize())) {
+    		return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
+    	}
+    	// Enough space if we delete the old tuple and compact?
+    	if (additionalBytesRequired <= buf.getInt(totalFreeSpaceOff)) {
+    		return FrameOpSpaceStatus.SUFFICIENT_SPACE;
+    	}
+        return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
+    }
 
-			int tupleEndOff = frameTuple.getFieldStart(frameTuple
-					.getFieldCount() - 1)
-					+ frameTuple.getFieldLength(frameTuple.getFieldCount() - 1);
-			int tupleLength = tupleEndOff - tupleOff;
-			System.arraycopy(buf.array(), tupleOff, buf.array(), freeSpace,
-					tupleLength);
+    protected void resetSpaceParams() {
+        buf.putInt(freeSpaceOff, smFlagOff + 1);
+        buf.putInt(totalFreeSpaceOff, buf.capacity() - (smFlagOff + 1));
+    }
 
-			slotManager.setSlot(sortedTupleOffs.get(i).slotOff, freeSpace);
-			freeSpace += tupleLength;
-		}
+    @Override
+    public void insert(ITupleReference tuple, int tupleIndex) {
+        slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
+        int bytesWritten = tupleWriter.writeTuple(tuple, buf.array(), buf.getInt(freeSpaceOff));
+        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
+        buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
+        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
+    }
 
-		buf.putInt(freeSpaceOff, freeSpace);
-		buf.putInt(totalFreeSpaceOff, buf.capacity() - freeSpace - tupleCount
-				* slotManager.getSlotSize());
+    @Override
+    public void update(ITupleReference newTuple, int oldTupleIndex, boolean inPlace) {
+    	frameTuple.resetByTupleIndex(this, oldTupleIndex);
+		int oldTupleBytes = frameTuple.getTupleSize();
+		int slotOff = slotManager.getSlotOff(oldTupleIndex);
+		int bytesWritten = 0;
+    	if (inPlace) {    		
+    		// Overwrite the old tuple in place.
+    		bytesWritten = tupleWriter.writeTuple(newTuple, buf.array(), buf.getInt(slotOff));
+    	} else {
+    		// Insert the new tuple at the end of the free space, and change the slot value (effectively "deleting" the old tuple).
+    		int newTupleOff = buf.getInt(freeSpaceOff);
+    		bytesWritten = tupleWriter.writeTuple(newTuple, buf.array(), newTupleOff);
+    		// Update slot value.
+    		buf.putInt(slotOff, newTupleOff);
+    		// Update contiguous free space pointer.
+    		buf.putInt(freeSpaceOff, newTupleOff + bytesWritten);
+    	}
+    	buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + oldTupleBytes - bytesWritten);
+    }
 
-		return false;
-	}
+    @Override
+    public String printHeader() {
+    	StringBuilder strBuilder = new StringBuilder();
+    	strBuilder.append("pageLsnOff:        " + pageLsnOff + "\n");
+    	strBuilder.append("tupleCountOff:     " + tupleCountOff + "\n");
+    	strBuilder.append("freeSpaceOff:      " + freeSpaceOff + "\n");
+    	strBuilder.append("totalFreeSpaceOff: " + totalFreeSpaceOff + "\n");
+    	strBuilder.append("levelOff:          " + levelOff + "\n");
+    	strBuilder.append("smFlagOff:         " + smFlagOff + "\n");
+    	return strBuilder.toString();
+    }
 
-	@Override
-	public void delete(ITupleReference tuple, MultiComparator cmp,
-			boolean exactDelete) throws Exception {
+    @Override
+    public int getTupleCount() {
+        return buf.getInt(tupleCountOff);
+    }
 
-		frameTuple.setFieldCount(cmp.getFieldCount());
-		int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp,
-				FindTupleMode.FTM_EXACT,
-				FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
-		int slotOff = slotManager.getSlotOff(tupleIndex);
-		if (tupleIndex < 0) {
-			throw new TreeIndexException("Key to be deleted does not exist.");
-		} else {
-			if (exactDelete) {
-				// check the non-key columns for equality by byte-by-byte
-				// comparison
-				int tupleOff = slotManager.getTupleOff(slotOff);
-				frameTuple.resetByTupleOffset(buf, tupleOff);
+    public ISlotManager getSlotManager() {
+        return slotManager;
+    }
 
-				int comparison = cmp.fieldRangeCompare(tuple, frameTuple,
-						cmp.getKeyFieldCount() - 1,
-						cmp.getFieldCount() - cmp.getKeyFieldCount());
-				if (comparison != 0) {
-					throw new TreeIndexException(
-							"Cannot delete tuple. Byte-by-byte comparison failed to prove equality.");
-				}
-			}
+    @Override
+    public int getTupleOffset(int slotNum) {
+        return slotManager.getTupleOff(slotManager.getSlotStartOff() - slotNum * slotManager.getSlotSize());
+    }
 
-			int tupleOff = slotManager.getTupleOff(slotOff);
-			frameTuple.resetByTupleOffset(buf, tupleOff);
-			int tupleSize = tupleWriter.bytesRequired(frameTuple);
+    @Override
+    public long getPageLsn() {
+        return buf.getLong(pageLsnOff);
+    }
 
-			// perform deletion (we just do a memcpy to overwrite the slot)
-			int slotStartOff = slotManager.getSlotEndOff();
-			int length = slotOff - slotStartOff;
-			System.arraycopy(buf.array(), slotStartOff, buf.array(),
-					slotStartOff + slotManager.getSlotSize(), length);
+    @Override
+    public void setPageLsn(long pageLsn) {
+        buf.putLong(pageLsnOff, pageLsn);
+    }
 
-			// maintain space information
-			buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
-			buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff)
-					+ tupleSize + slotManager.getSlotSize());
-		}
-	}
+    @Override
+    public int getTotalFreeSpace() {
+        return buf.getInt(totalFreeSpaceOff);
+    }
 
-	@Override
-	public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple,
-			MultiComparator cmp) {
-		int bytesRequired = tupleWriter.bytesRequired(tuple);
-		if (bytesRequired + slotManager.getSlotSize() <= buf.capacity()
-				- buf.getInt(freeSpaceOff)
-				- (buf.getInt(tupleCountOff) * slotManager.getSlotSize()))
-			return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
-		else if (bytesRequired + slotManager.getSlotSize() <= buf
-				.getInt(totalFreeSpaceOff))
-			return FrameOpSpaceStatus.SUFFICIENT_SPACE;
-		else
-			return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
-	}
+    @Override
+    public boolean compress() {
+        return false;
+    }
 
-	@Override
-	public FrameOpSpaceStatus hasSpaceUpdate(int rid, ITupleReference tuple,
-			MultiComparator cmp) {
-		// TODO Auto-generated method stub
-		return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
-	}
+    @Override
+    public int getSlotSize() {
+        return slotManager.getSlotSize();
+    }
 
-	protected void resetSpaceParams() {
-		buf.putInt(freeSpaceOff, smFlagOff + 1);
-		buf.putInt(totalFreeSpaceOff, buf.capacity() - (smFlagOff + 1));
-	}
-
-	@Override
-	public int findTupleIndex(ITupleReference tuple, MultiComparator cmp)
-			throws Exception {
-		frameTuple.setFieldCount(cmp.getFieldCount());
-		return slotManager.findTupleIndex(tuple, frameTuple, cmp,
-				FindTupleMode.FTM_INCLUSIVE,
-				FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
-	}
-
-	@Override
-	public void insert(ITupleReference tuple, MultiComparator cmp,
-			int tupleIndex) throws Exception {
-		slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
-		int bytesWritten = tupleWriter.writeTuple(tuple, buf,
-				buf.getInt(freeSpaceOff));
-		buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
-		buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
-		buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff)
-				- bytesWritten - slotManager.getSlotSize());
-	}
-
-	@Override
-	public void update(int rid, ITupleReference tuple) throws Exception {
-		// TODO Auto-generated method stub
-
-	}
-
-	@Override
-	public void printHeader() {
-		// TODO Auto-generated method stub
-
-	}
-
-	@Override
-	public int getTupleCount() {
-		return buf.getInt(tupleCountOff);
-	}
-
-	public ISlotManager getSlotManager() {
-		return slotManager;
-	}
-
-	@Override
-	public String printKeys(MultiComparator cmp,
-			ISerializerDeserializer[] fields) throws HyracksDataException {
-		StringBuilder strBuilder = new StringBuilder();
-		int tupleCount = buf.getInt(tupleCountOff);
-		frameTuple.setFieldCount(fields.length);
-		for (int i = 0; i < tupleCount; i++) {
-			frameTuple.resetByTupleIndex(this, i);
-			for (int j = 0; j < cmp.getKeyFieldCount(); j++) {
-				ByteArrayInputStream inStream = new ByteArrayInputStream(
-						frameTuple.getFieldData(j),
-						frameTuple.getFieldStart(j),
-						frameTuple.getFieldLength(j));
-				DataInput dataIn = new DataInputStream(inStream);
-				Object o = fields[j].deserialize(dataIn);
-				strBuilder.append(o.toString() + " ");
-			}
-			strBuilder.append(" | ");
-		}
-		strBuilder.append("\n");
-		return strBuilder.toString();
-	}
-
-	@Override
-	public int getTupleOffset(int slotNum) {
-		return slotManager.getTupleOff(slotManager.getSlotStartOff() - slotNum
-				* slotManager.getSlotSize());
-	}
-
-	@Override
-	public int getPageLsn() {
-		return buf.getInt(pageLsnOff);
-	}
-
-	@Override
-	public void setPageLsn(int pageLsn) {
-		buf.putInt(pageLsnOff, pageLsn);
-	}
-
-	@Override
-	public int getTotalFreeSpace() {
-		return buf.getInt(totalFreeSpaceOff);
-	}
-
-	@Override
-	public boolean compress(MultiComparator cmp) {
-		return false;
-	}
-
-	@Override
-	public int getSlotSize() {
-		return slotManager.getSlotSize();
-	}
-
-	@Override
-	public void setPageTupleFieldCount(int fieldCount) {
-		frameTuple.setFieldCount(fieldCount);
-	}
-
-	public ITreeIndexTupleWriter getTupleWriter() {
-		return tupleWriter;
+    @Override
+    public ITreeIndexTupleWriter getTupleWriter() {
+        return tupleWriter;
+    }
+    
+    @Override
+    public ITreeIndexTupleReference createTupleReference() {
+    	return tupleWriter.createTupleReference();
+    }
+    
+	public int getFreeContiguousSpace() {
+		return buf.capacity() - getFreeSpaceOff()
+				- (getTupleCount() * slotManager.getSlotSize());
 	}
 }
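With the LSN widened to 8 bytes, the page header occupies offsets 0 through 21 (pageLsn 0, tupleCount 8, freeSpace 12, totalFreeSpace 16, level 20, smFlag 21), and resetSpaceParams() starts the free space at offset 22. The space checks in hasSpaceInsert() reduce to plain arithmetic; a standalone sketch assuming a 4096-byte page, 4-byte slots and ten 100-byte tuples with no fragmentation:

    public class NsmFreeSpaceSketch {
        public static void main(String[] args) {
            final int pageSize = 4096;   // hypothetical page size
            final int headerSize = 22;   // smFlagOff (21) + 1, as in resetSpaceParams()
            final int slotSize = 4;      // AbstractSlotManager.slotSize
            final int tupleCount = 10;
            final int tupleBytes = 100;

            // State after inserting ten contiguous 100-byte tuples into a fresh page.
            int freeSpaceOff = headerSize + tupleCount * tupleBytes;              // 1022
            int totalFreeSpace = pageSize - freeSpaceOff - tupleCount * slotSize; // 3034

            // hasSpaceInsert(): the contiguous region must hold the tuple plus one new slot.
            int bytesRequired = 100;
            boolean contiguousFits = bytesRequired + slotSize
                    <= pageSize - freeSpaceOff - tupleCount * slotSize;           // true
            boolean fitsAfterCompaction = bytesRequired + slotSize <= totalFreeSpace;

            System.out.println(freeSpaceOff + " " + totalFreeSpace + " "
                    + contiguousFits + " " + fitsAfterCompaction);
        }
    }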
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/TreeDiskOrderScanCursor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/TreeDiskOrderScanCursor.java
index 01b91b1..18d60ec 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/TreeDiskOrderScanCursor.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/TreeDiskOrderScanCursor.java
@@ -21,7 +21,6 @@
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
 import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
 import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
 import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
@@ -30,17 +29,17 @@
 
 	private int tupleIndex = 0;
 	private int fileId = -1;
-	int currentPageId = -1;
-	int maxPageId = -1;
+	private int currentPageId = -1;
+	private int maxPageId = -1;
 	private ICachedPage page = null;
 	private ITreeIndexFrame frame = null;
 	private IBufferCache bufferCache = null;
-
+	
 	private ITreeIndexTupleReference frameTuple;
 
 	public TreeDiskOrderScanCursor(ITreeIndexFrame frame) {
-		this.frame = frame;
-		this.frameTuple = frame.getTupleWriter().createTupleReference();
+		this.frame = frame;		
+		this.frameTuple = frame.createTupleReference();
 	}
 
 	@Override
@@ -62,8 +61,7 @@
 
 	private boolean positionToNextLeaf(boolean skipCurrent)
 			throws HyracksDataException {
-		while ((frame.getLevel() != 0 || skipCurrent)
-				&& (currentPageId <= maxPageId) || (frame.getTupleCount() == 0)) {
+		while ((frame.getLevel() != 0 || skipCurrent) && (currentPageId <= maxPageId)) {
 			currentPageId++;
 
 			ICachedPage nextPage = bufferCache.pin(
@@ -86,7 +84,10 @@
 	}
 
 	@Override
-	public boolean hasNext() throws Exception {
+	public boolean hasNext() throws Exception {		
+		if (currentPageId > maxPageId) {
+			return false;
+		}
 		if (tupleIndex >= frame.getTupleCount()) {
 			boolean nextLeafExists = positionToNextLeaf(true);
 			if (nextLeafExists) {
@@ -95,9 +96,8 @@
 			} else {
 				return false;
 			}
-		}
-
-		frameTuple.resetByTupleIndex(frame, tupleIndex);
+		}		
+		frameTuple.resetByTupleIndex(frame, tupleIndex);		
 		return true;
 	}
 
@@ -114,17 +114,10 @@
 			page.releaseReadLatch();
 			bufferCache.unpin(page);
 		}
-
 		page = initialState.getPage();
-		tupleIndex = 0;
+		tupleIndex = 0;		
 		frame.setPage(page);
-		MultiComparator lowKeyCmp = searchPred.getLowKeyComparator();
-		frameTuple.setFieldCount(lowKeyCmp.getFieldCount());
-		boolean leafExists = positionToNextLeaf(false);
-		if (!leafExists) {
-			throw new HyracksDataException(
-					"Failed to open disk-order scan cursor for tree index. Traget tree index has no leaves.");
-		}
+		positionToNextLeaf(false);
 	}
 
 	@Override
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FindTupleMode.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FindTupleMode.java
index 11ac257..5002189 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FindTupleMode.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FindTupleMode.java
@@ -16,5 +16,5 @@
 package edu.uci.ics.hyracks.storage.am.common.ophelpers;
 
 public enum FindTupleMode {
-	FTM_INCLUSIVE, FTM_EXCLUSIVE, FTM_EXACT
+	INCLUSIVE, EXCLUSIVE, EXCLUSIVE_ERROR_IF_EXISTS, EXACT
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FindTupleNoExactMatchPolicy.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FindTupleNoExactMatchPolicy.java
index b8f3c9a..8b3f7f5 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FindTupleNoExactMatchPolicy.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FindTupleNoExactMatchPolicy.java
@@ -16,5 +16,5 @@
 package edu.uci.ics.hyracks.storage.am.common.ophelpers;
 
 public enum FindTupleNoExactMatchPolicy {
-	FTP_LOWER_KEY, FTP_HIGHER_KEY
+	LOWER_KEY, HIGHER_KEY, NONE
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IndexOpContext.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IndexOpContext.java
deleted file mode 100644
index 9122174..0000000
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IndexOpContext.java
+++ /dev/null
@@ -1,5 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.common.ophelpers;
-
-public interface IndexOpContext {
-	void reset();
-}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/LongArrayList.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/LongArrayList.java
new file mode 100644
index 0000000..4dd1b5f
--- /dev/null
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/LongArrayList.java
@@ -0,0 +1,89 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.ophelpers;
+
+public class LongArrayList {
+	private long[] data;
+	private int size;
+	private int first;
+	private final int growth;
+
+	public LongArrayList(int initialCapacity, int growth) {
+		data = new long[initialCapacity];
+		size = 0;
+		first = 0;
+		this.growth = growth;
+	}
+
+	public int size() {
+		return size;
+	}
+
+	public int first() {
+		return first;
+	}
+
+	public void add(long i) {
+		if (size == data.length) {
+			long[] newData = new long[data.length + growth];
+			System.arraycopy(data, 0, newData, 0, data.length);
+			data = newData;
+		}
+
+		data[size++] = i;
+	}
+
+	public void removeLast() {
+		if (size > 0)
+			size--;
+	}
+
+	// WARNING: caller is responsible for checking size > 0
+	public long getLast() {
+		return data[size - 1];
+	}
+
+	public long get(int i) {
+		return data[i];
+	}
+
+	// WARNING: caller is responsible for checking i < size
+	public void set(int i, long value) {
+		data[i] = value;
+
+	}
+
+	public long getFirst() {
+		return data[first];
+	}
+
+	public void moveFirst() {
+		first++;
+	}
+
+	public void clear() {
+		size = 0;
+		first = 0;
+	}
+
+	public boolean isLast() {
+		return size == first;
+	}
+
+	public boolean isEmpty() {
+		return size == 0;
+	}
+}
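LongArrayList is a growable primitive long array that can also be consumed from the front via first()/moveFirst(), so it works both as a stack and as a simple queue without boxing. A standalone usage example:

    import edu.uci.ics.hyracks.storage.am.common.ophelpers.LongArrayList;

    public class LongArrayListExample {
        public static void main(String[] args) {
            // Initial capacity of 4, growing by 4 elements whenever the array fills up.
            LongArrayList list = new LongArrayList(4, 4);
            for (long i = 0; i < 6; i++) {
                list.add(i * 10);                        // the fifth add grows the array to 8
            }
            System.out.println(list.size());             // 6

            // Stack-style use: peek at and drop the tail element.
            System.out.println(list.getLast());          // 50
            list.removeLast();

            // Queue-style use: consume from the head without shifting elements.
            while (!list.isLast()) {
                System.out.print(list.getFirst() + " "); // 0 10 20 30 40
                list.moveFirst();
            }
            System.out.println();

            list.clear();                                // resets both size and the head cursor
            System.out.println(list.isEmpty());          // true
        }
    }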
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/MultiComparator.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/MultiComparator.java
index a07ef03..df24484 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/MultiComparator.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/MultiComparator.java
@@ -15,45 +15,17 @@
 
 package edu.uci.ics.hyracks.storage.am.common.ophelpers;
 
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
-
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
 
 public class MultiComparator {
 
-	private static final long serialVersionUID = 1L;
+	private final IBinaryComparator[] cmps;
 
-	private IBinaryComparator[] cmps = null;
-	private ITypeTrait[] typeTraits;
-	private IPrimitiveValueProvider[] valueProviders = null;
-
-	private IBinaryComparator intCmp = IntegerBinaryComparatorFactory.INSTANCE
-			.createBinaryComparator();
-
-	public IBinaryComparator getIntCmp() {
-		return intCmp;
-	}
-
-	public MultiComparator(ITypeTrait[] typeTraits, IBinaryComparator[] cmps) {
-		this.typeTraits = typeTraits;
+	public MultiComparator(IBinaryComparator[] cmps) {
 		this.cmps = cmps;
 	}
 
-	public MultiComparator(ITypeTrait[] typeTraits, IBinaryComparator[] cmps,
-			IPrimitiveValueProvider[] valueProviders) {
-		this.typeTraits = typeTraits;
-		this.cmps = cmps;
-		this.valueProviders = valueProviders;
-	}
-
 	public int compare(ITupleReference tupleA, ITupleReference tupleB) {
 		for (int i = 0; i < cmps.length; i++) {
 			int cmp = cmps[i].compare(tupleA.getFieldData(i),
@@ -83,20 +55,6 @@
 		return 0;
 	}
 
-	public String printTuple(ITupleReference tuple,
-			ISerializerDeserializer[] fields) throws HyracksDataException {
-		StringBuilder strBuilder = new StringBuilder();
-		for (int i = 0; i < tuple.getFieldCount(); i++) {
-			ByteArrayInputStream inStream = new ByteArrayInputStream(
-					tuple.getFieldData(i), tuple.getFieldStart(i),
-					tuple.getFieldLength(i));
-			DataInput dataIn = new DataInputStream(inStream);
-			Object o = fields[i].deserialize(dataIn);
-			strBuilder.append(o.toString() + " ");
-		}
-		return strBuilder.toString();
-	}
-
 	public IBinaryComparator[] getComparators() {
 		return cmps;
 	}
@@ -104,21 +62,4 @@
 	public int getKeyFieldCount() {
 		return cmps.length;
 	}
-
-	public void setComparators(IBinaryComparator[] cmps) {
-		this.cmps = cmps;
-	}
-
-	public int getFieldCount() {
-		return typeTraits.length;
-	}
-
-	public ITypeTrait[] getTypeTraits() {
-		return typeTraits;
-	}
-
-	public IPrimitiveValueProvider[] getValueProviders() {
-		return valueProviders;
-	}
-
 }
\ No newline at end of file
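MultiComparator is now a thin wrapper over one IBinaryComparator per key field: compare() walks the fields in order and returns the first non-zero result. A sketch of a helper built on it, using the integer comparator factory referenced elsewhere in this patch; the two tuples are supplied by the caller:

    import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
    import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
    import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
    import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;

    public class MultiComparatorSketch {
        // Orders tupleA against tupleB on two leading integer key fields.
        static int compareOnTwoIntKeys(ITupleReference tupleA, ITupleReference tupleB) {
            IBinaryComparator[] cmps = new IBinaryComparator[] {
                    IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator(),
                    IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator() };
            MultiComparator cmp = new MultiComparator(cmps);
            // Negative, zero or positive, decided by the first key field that differs.
            return cmp.compare(tupleA, tupleB);
        }
    }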
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/SlotOffTupleOff.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/SlotOffTupleOff.java
index 35231c2..7e9042c 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/SlotOffTupleOff.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/SlotOffTupleOff.java
@@ -30,4 +30,9 @@
 	public int compareTo(SlotOffTupleOff o) {
 		return tupleOff - o.tupleOff;
 	}
+	
+	@Override 
+	public String toString() {
+		return tupleIndex + " " + slotOff + " " + tupleOff;
+	}
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleReference.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleReference.java
index 353bd95..a470d04 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleReference.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleReference.java
@@ -22,77 +22,78 @@
 
 public class SimpleTupleReference implements ITreeIndexTupleReference {
 
-	protected ByteBuffer buf;
-	protected int fieldStartIndex;
-	protected int fieldCount;
-	protected int tupleStartOff;
-	protected int nullFlagsBytes;
-	protected int fieldSlotsBytes;
+    protected ByteBuffer buf;
+    protected int fieldStartIndex;
+    protected int fieldCount;
+    protected int tupleStartOff;
+    protected int nullFlagsBytes;
+    protected int fieldSlotsBytes;
+
+    @Override
+    public void resetByTupleOffset(ByteBuffer buf, int tupleStartOff) {
+        this.buf = buf;
+        this.tupleStartOff = tupleStartOff;
+    }
+
+    @Override
+    public void resetByTupleIndex(ITreeIndexFrame frame, int tupleIndex) {
+        resetByTupleOffset(frame.getBuffer(), frame.getTupleOffset(tupleIndex));
+    }
+
+    @Override
+    public void setFieldCount(int fieldCount) {
+        this.fieldCount = fieldCount;
+        nullFlagsBytes = getNullFlagsBytes();
+        fieldSlotsBytes = getFieldSlotsBytes();
+        fieldStartIndex = 0;
+    }
+
+    @Override
+    public void setFieldCount(int fieldStartIndex, int fieldCount) {
+        this.fieldCount = fieldCount;
+        this.fieldStartIndex = fieldStartIndex;
+    }
+
+    @Override
+    public int getFieldCount() {
+        return fieldCount;
+    }
+
+    @Override
+    public byte[] getFieldData(int fIdx) {
+        return buf.array();
+    }
+
+    @Override
+    public int getFieldLength(int fIdx) {
+        if (fIdx == 0) {
+            return buf.getShort(tupleStartOff + nullFlagsBytes);
+        } else {
+            return buf.getShort(tupleStartOff + nullFlagsBytes + fIdx * 2)
+                    - buf.getShort(tupleStartOff + nullFlagsBytes + ((fIdx - 1) * 2));
+        }
+    }
+
+    @Override
+    public int getFieldStart(int fIdx) {
+        if (fIdx == 0) {
+            return tupleStartOff + nullFlagsBytes + fieldSlotsBytes;
+        } else {
+            return tupleStartOff + nullFlagsBytes + fieldSlotsBytes
+                    + buf.getShort(tupleStartOff + nullFlagsBytes + ((fIdx - 1) * 2));
+        }
+    }
+
+    protected int getNullFlagsBytes() {
+        return (int) Math.ceil(fieldCount / 8.0);
+    }
+
+    protected int getFieldSlotsBytes() {
+        return fieldCount * 2;
+    }
 
 	@Override
-	public void resetByTupleOffset(ByteBuffer buf, int tupleStartOff) {
-		this.buf = buf;
-		this.tupleStartOff = tupleStartOff;
-	}
-
-	@Override
-	public void resetByTupleIndex(ITreeIndexFrame frame, int tupleIndex) {
-		resetByTupleOffset(frame.getBuffer(), frame.getTupleOffset(tupleIndex));
-	}
-
-	@Override
-	public void setFieldCount(int fieldCount) {
-		this.fieldCount = fieldCount;
-		nullFlagsBytes = getNullFlagsBytes();
-		fieldSlotsBytes = getFieldSlotsBytes();
-		fieldStartIndex = 0;
-	}
-
-	@Override
-	public void setFieldCount(int fieldStartIndex, int fieldCount) {
-		this.fieldCount = fieldCount;
-		this.fieldStartIndex = fieldStartIndex;
-	}
-
-	@Override
-	public int getFieldCount() {
-		return fieldCount;
-	}
-
-	@Override
-	public byte[] getFieldData(int fIdx) {
-		return buf.array();
-	}
-
-	@Override
-	public int getFieldLength(int fIdx) {
-		if (fIdx == 0) {
-			return buf.getShort(tupleStartOff + nullFlagsBytes);
-		} else {
-			return buf.getShort(tupleStartOff + nullFlagsBytes + fIdx * 2)
-					- buf.getShort(tupleStartOff + nullFlagsBytes
-							+ ((fIdx - 1) * 2));
-		}
-	}
-
-	@Override
-	public int getFieldStart(int fIdx) {
-		if (fIdx == 0) {
-			return tupleStartOff + nullFlagsBytes + fieldSlotsBytes;
-		} else {
-			return tupleStartOff
-					+ nullFlagsBytes
-					+ fieldSlotsBytes
-					+ buf.getShort(tupleStartOff + nullFlagsBytes
-							+ ((fIdx - 1) * 2));
-		}
-	}
-
-	protected int getNullFlagsBytes() {
-		return (int) Math.ceil(fieldCount / 8.0);
-	}
-
-	protected int getFieldSlotsBytes() {
-		return fieldCount * 2;
+	public int getTupleSize() {
+		return nullFlagsBytes + fieldSlotsBytes + buf.getShort(tupleStartOff + nullFlagsBytes + (fieldCount-1) * 2);
 	}
 }
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriter.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriter.java
index 11f7820..831247e 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriter.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriter.java
@@ -23,95 +23,103 @@
 
 public class SimpleTupleWriter implements ITreeIndexTupleWriter {
 
-	@Override
-	public int bytesRequired(ITupleReference tuple) {
-		int bytes = getNullFlagsBytes(tuple) + getFieldSlotsBytes(tuple);
-		for (int i = 0; i < tuple.getFieldCount(); i++) {
-			bytes += tuple.getFieldLength(i);
-		}
-		return bytes;
+	// Writes the short in big endian (high byte first), matching ByteBuffer.getShort in SimpleTupleReference.
+	private static void writeShortL(short s, byte[] buf, int targetOff) {
+		buf[targetOff] = (byte)(s >> 8);
+		buf[targetOff + 1] = (byte)(s >> 0);
+	}
+	
+	// Writes the short in little endian (low byte first).
+	private static void writeShortB(short s, byte[] buf, int targetOff) {
+		buf[targetOff] = (byte) (s >> 0);
+		buf[targetOff + 1] = (byte) (s >> 8);
+	}
+	
+    @Override
+    public int bytesRequired(ITupleReference tuple) {
+        int bytes = getNullFlagsBytes(tuple) + getFieldSlotsBytes(tuple);
+        for (int i = 0; i < tuple.getFieldCount(); i++) {
+            bytes += tuple.getFieldLength(i);
+        }
+        return bytes;
+    }
+
+    @Override
+    public int bytesRequired(ITupleReference tuple, int startField, int numFields) {
+        int bytes = getNullFlagsBytes(tuple, startField, numFields) + getFieldSlotsBytes(tuple, startField, numFields);
+        for (int i = startField; i < startField + numFields; i++) {
+            bytes += tuple.getFieldLength(i);
+        }
+        return bytes;
+    }
+
+    @Override
+    public ITreeIndexTupleReference createTupleReference() {
+        return new SimpleTupleReference();
+    }
+
+    @Override
+    public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf, int targetOff) {
+        return writeTuple(tuple, targetBuf.array(), targetOff);
+    }
+    
+    @Override
+	public int writeTuple(ITupleReference tuple, byte[] targetBuf, int targetOff) {
+    	int runner = targetOff;
+        int nullFlagsBytes = getNullFlagsBytes(tuple);
+        int fieldSlotsBytes = getFieldSlotsBytes(tuple);
+        for (int i = 0; i < nullFlagsBytes; i++) {
+            targetBuf[runner++] = (byte) 0;
+        }
+        runner += fieldSlotsBytes;
+        int fieldEndOff = 0;
+        for (int i = 0; i < tuple.getFieldCount(); i++) {
+            System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf, runner,
+                    tuple.getFieldLength(i));
+            fieldEndOff += tuple.getFieldLength(i);
+            runner += tuple.getFieldLength(i);
+            writeShortL((short) fieldEndOff, targetBuf, targetOff + nullFlagsBytes + i * 2);
+        }
+        return runner - targetOff;
 	}
 
-	@Override
-	public int bytesRequired(ITupleReference tuple, int startField,
-			int numFields) {
-		int bytes = getNullFlagsBytes(tuple, startField, numFields)
-				+ getFieldSlotsBytes(tuple, startField, numFields);
-		for (int i = startField; i < startField + numFields; i++) {
-			bytes += tuple.getFieldLength(i);
-		}
-		return bytes;
-	}
+    @Override
+    public int writeTupleFields(ITupleReference tuple, int startField, int numFields, ByteBuffer targetBuf,
+            int targetOff) {
+        int runner = targetOff;
+        int nullFlagsBytes = getNullFlagsBytes(tuple, startField, numFields);
+        for (int i = 0; i < nullFlagsBytes; i++) {
+            targetBuf.put(runner++, (byte) 0);
+        }
+        runner += getFieldSlotsBytes(tuple, startField, numFields);
 
-	@Override
-	public ITreeIndexTupleReference createTupleReference() {
-		return new SimpleTupleReference();
-	}
+        int fieldEndOff = 0;
+        int fieldCounter = 0;
+        for (int i = startField; i < startField + numFields; i++) {
+            System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner,
+                    tuple.getFieldLength(i));
+            fieldEndOff += tuple.getFieldLength(i);
+            runner += tuple.getFieldLength(i);
+            targetBuf.putShort(targetOff + nullFlagsBytes + fieldCounter * 2, (short) fieldEndOff);
+            fieldCounter++;
+        }
 
-	@Override
-	public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf,
-			int targetOff) {
-		int runner = targetOff;
-		int nullFlagsBytes = getNullFlagsBytes(tuple);
-		int fieldSlotsBytes = getFieldSlotsBytes(tuple);
-		for (int i = 0; i < nullFlagsBytes; i++) {
-			targetBuf.put(runner++, (byte) 0);
-		}
-		runner += fieldSlotsBytes;
+        return runner - targetOff;
+    }
 
-		int fieldEndOff = 0;
-		for (int i = 0; i < tuple.getFieldCount(); i++) {
-			System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i),
-					targetBuf.array(), runner, tuple.getFieldLength(i));
-			fieldEndOff += tuple.getFieldLength(i);
-			runner += tuple.getFieldLength(i);
-			targetBuf.putShort(targetOff + nullFlagsBytes + i * 2,
-					(short) fieldEndOff);
-		}
+    protected int getNullFlagsBytes(ITupleReference tuple) {
+        return (int) Math.ceil((double) tuple.getFieldCount() / 8.0);
+    }
 
-		return runner - targetOff;
-	}
+    protected int getFieldSlotsBytes(ITupleReference tuple) {
+        return tuple.getFieldCount() * 2;
+    }
 
-	@Override
-	public int writeTupleFields(ITupleReference tuple, int startField,
-			int numFields, ByteBuffer targetBuf, int targetOff) {
-		int runner = targetOff;
-		int nullFlagsBytes = getNullFlagsBytes(tuple, startField, numFields);
-		for (int i = 0; i < nullFlagsBytes; i++) {
-			targetBuf.put(runner++, (byte) 0);
-		}
-		runner += getFieldSlotsBytes(tuple, startField, numFields);
+    protected int getNullFlagsBytes(ITupleReference tuple, int startField, int numFields) {
+        return (int) Math.ceil((double) numFields / 8.0);
+    }
 
-		int fieldEndOff = 0;
-		int fieldCounter = 0;
-		for (int i = startField; i < startField + numFields; i++) {
-			System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i),
-					targetBuf.array(), runner, tuple.getFieldLength(i));
-			fieldEndOff += tuple.getFieldLength(i);
-			runner += tuple.getFieldLength(i);
-			targetBuf.putShort(targetOff + nullFlagsBytes + fieldCounter * 2,
-					(short) fieldEndOff);
-			fieldCounter++;
-		}
-
-		return runner - targetOff;
-	}
-
-	protected int getNullFlagsBytes(ITupleReference tuple) {
-		return (int) Math.ceil((double) tuple.getFieldCount() / 8.0);
-	}
-
-	protected int getFieldSlotsBytes(ITupleReference tuple) {
-		return tuple.getFieldCount() * 2;
-	}
-
-	protected int getNullFlagsBytes(ITupleReference tuple, int startField,
-			int numFields) {
-		return (int) Math.ceil((double) numFields / 8.0);
-	}
-
-	protected int getFieldSlotsBytes(ITupleReference tuple, int startField,
-			int numFields) {
-		return numFields * 2;
-	}
+    protected int getFieldSlotsBytes(ITupleReference tuple, int startField, int numFields) {
+        return numFields * 2;
+    }	
 }
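The simple tuple layout is: one null-flag byte per eight fields, then a 2-byte cumulative end offset per field, then the concatenated field data. For a hypothetical three-field tuple with field lengths 4, 8 and 5 bytes, the sizes work out as follows, mirroring bytesRequired() above and getFieldStart()/getTupleSize() in SimpleTupleReference:

    public class SimpleTupleLayoutSketch {
        public static void main(String[] args) {
            int[] fieldLengths = { 4, 8, 5 };                         // hypothetical field sizes
            int fieldCount = fieldLengths.length;

            int nullFlagsBytes = (int) Math.ceil(fieldCount / 8.0);   // 1
            int fieldSlotsBytes = fieldCount * 2;                     // 6: one 2-byte end offset per field
            int dataBytes = 4 + 8 + 5;                                // 17

            // bytesRequired(tuple) and getTupleSize() both evaluate to 24 here.
            int tupleSize = nullFlagsBytes + fieldSlotsBytes + dataBytes;
            // The end offsets written into the slot area are cumulative: 4, 12, 17.
            // Field 1 therefore starts at tupleStartOff + nullFlagsBytes + fieldSlotsBytes + 4.
            int field1StartWithinTuple = nullFlagsBytes + fieldSlotsBytes + fieldLengths[0]; // 11

            System.out.println(tupleSize + " " + field1StartWithinTuple);
        }
    }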
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleReference.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleReference.java
index 31b32e6..72a36a1 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleReference.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleReference.java
@@ -22,99 +22,105 @@
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
 
 public class TypeAwareTupleReference implements ITreeIndexTupleReference {
-	protected ByteBuffer buf;
-	protected int fieldStartIndex;
-	protected int fieldCount;
-	protected int tupleStartOff;
-	protected int nullFlagsBytes;
-	protected int dataStartOff;
+    protected ByteBuffer buf;
+    protected int fieldStartIndex;
+    protected int fieldCount;
+    protected int tupleStartOff;
+    protected int nullFlagsBytes;
+    protected int dataStartOff;
 
-	protected ITypeTrait[] typeTraits;
-	protected VarLenIntEncoderDecoder encDec = new VarLenIntEncoderDecoder();
-	protected int[] decodedFieldSlots;
+    protected ITypeTrait[] typeTraits;
+    protected VarLenIntEncoderDecoder encDec = new VarLenIntEncoderDecoder();
+    protected int[] decodedFieldSlots;
 
-	public TypeAwareTupleReference(ITypeTrait[] typeTraits) {
-		this.typeTraits = typeTraits;
-		this.fieldStartIndex = 0;
-	}
+    public TypeAwareTupleReference(ITypeTrait[] typeTraits) {
+        this.typeTraits = typeTraits;
+        this.fieldStartIndex = 0;
+        setFieldCount(typeTraits.length);
+    }
+
+    @Override
+    public void resetByTupleOffset(ByteBuffer buf, int tupleStartOff) {
+        this.buf = buf;
+        this.tupleStartOff = tupleStartOff;
+
+        // decode field slots
+        int field = 0;
+        int cumul = 0;
+        int end = fieldStartIndex + fieldCount;
+        encDec.reset(buf.array(), tupleStartOff + nullFlagsBytes);
+        for (int i = fieldStartIndex; i < end; i++) {
+            int staticDataLen = typeTraits[i].getStaticallyKnownDataLength();
+            if (staticDataLen == ITypeTrait.VARIABLE_LENGTH) {
+                cumul += encDec.decode();
+                decodedFieldSlots[field++] = cumul;
+            } else {
+                cumul += staticDataLen;
+                decodedFieldSlots[field++] = cumul;
+            }
+        }
+        dataStartOff = encDec.getPos();
+    }
+
+    @Override
+    public void resetByTupleIndex(ITreeIndexFrame frame, int tupleIndex) {
+        resetByTupleOffset(frame.getBuffer(), frame.getTupleOffset(tupleIndex));
+    }
+
+    @Override
+    public void setFieldCount(int fieldCount) {
+        this.fieldCount = fieldCount;
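+        // (re)allocate the decoded field slot array only when it is missing or too small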
+        if (decodedFieldSlots == null || fieldCount > decodedFieldSlots.length) {
+            decodedFieldSlots = new int[fieldCount];
+        }
+        nullFlagsBytes = getNullFlagsBytes();
+        this.fieldStartIndex = 0;
+    }
+
+    @Override
+    public void setFieldCount(int fieldStartIndex, int fieldCount) {
+        setFieldCount(fieldCount);
+        this.fieldStartIndex = fieldStartIndex;
+    }
+
+    @Override
+    public int getFieldCount() {
+        return fieldCount;
+    }
+
+    @Override
+    public byte[] getFieldData(int fIdx) {
+        return buf.array();
+    }
+
+    @Override
+    public int getFieldLength(int fIdx) {
+        if (fIdx == 0) {
+            return decodedFieldSlots[0];
+        } else {
+            return decodedFieldSlots[fIdx] - decodedFieldSlots[fIdx - 1];
+        }
+    }
+
+    @Override
+    public int getFieldStart(int fIdx) {
+        if (fIdx == 0) {
+            return dataStartOff;
+        } else {
+            return dataStartOff + decodedFieldSlots[fIdx - 1];
+        }
+    }
+
+    protected int getNullFlagsBytes() {
+        return (int) Math.ceil(fieldCount / 8.0);
+    }
 
 	@Override
-	public void resetByTupleOffset(ByteBuffer buf, int tupleStartOff) {
-		this.buf = buf;
-		this.tupleStartOff = tupleStartOff;
-
-		// decode field slots
-		int field = 0;
-		int cumul = 0;
-		int end = fieldStartIndex + fieldCount;
-		encDec.reset(buf.array(), tupleStartOff + nullFlagsBytes);
-		for (int i = fieldStartIndex; i < end; i++) {
-			int staticDataLen = typeTraits[i].getStaticallyKnownDataLength();
-			if (staticDataLen == ITypeTrait.VARIABLE_LENGTH) {
-				cumul += encDec.decode();
-				decodedFieldSlots[field++] = cumul;
-			} else {
-				cumul += staticDataLen;
-				decodedFieldSlots[field++] = cumul;
-			}
-		}
-		dataStartOff = encDec.getPos();
-	}
-
-	@Override
-	public void resetByTupleIndex(ITreeIndexFrame frame, int tupleIndex) {
-		resetByTupleOffset(frame.getBuffer(), frame.getTupleOffset(tupleIndex));
-	}
-
-	@Override
-	public void setFieldCount(int fieldCount) {
-		this.fieldCount = fieldCount;
-		if (decodedFieldSlots == null) {
-			decodedFieldSlots = new int[fieldCount];
-		} else {
-			if (fieldCount > decodedFieldSlots.length) {
-				decodedFieldSlots = new int[fieldCount];
-			}
-		}
-		nullFlagsBytes = getNullFlagsBytes();
-		this.fieldStartIndex = 0;
-	}
-
-	@Override
-	public void setFieldCount(int fieldStartIndex, int fieldCount) {
-		setFieldCount(fieldCount);
-		this.fieldStartIndex = fieldStartIndex;
-	}
-
-	@Override
-	public int getFieldCount() {
-		return fieldCount;
-	}
-
-	@Override
-	public byte[] getFieldData(int fIdx) {
-		return buf.array();
-	}
-
-	@Override
-	public int getFieldLength(int fIdx) {
-		if (fIdx == 0) {
-			return decodedFieldSlots[0];
-		} else {
-			return decodedFieldSlots[fIdx] - decodedFieldSlots[fIdx - 1];
-		}
-	}
-
-	@Override
-	public int getFieldStart(int fIdx) {
-		if (fIdx == 0) {
-			return dataStartOff;
-		} else {
-			return dataStartOff + decodedFieldSlots[fIdx - 1];
-		}
-	}
-
-	protected int getNullFlagsBytes() {
-		return (int) Math.ceil(fieldCount / 8.0);
+	public int getTupleSize() {
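+		// tuple size = header (null flags + field slots) + total length of the data fields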
+		return dataStartOff - tupleStartOff + decodedFieldSlots[fieldCount - 1];
 	}
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriter.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriter.java
index 95468d4..6b33712 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriter.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriter.java
@@ -24,131 +24,132 @@
 
 public class TypeAwareTupleWriter implements ITreeIndexTupleWriter {
 
-	protected ITypeTrait[] typeTraits;
-	protected VarLenIntEncoderDecoder encDec = new VarLenIntEncoderDecoder();
+    protected ITypeTrait[] typeTraits;
+    protected VarLenIntEncoderDecoder encDec = new VarLenIntEncoderDecoder();
 
-	public TypeAwareTupleWriter(ITypeTrait[] typeTraits) {
-		this.typeTraits = typeTraits;
-	}
+    public TypeAwareTupleWriter(ITypeTrait[] typeTraits) {
+        this.typeTraits = typeTraits;
+    }
 
-	@Override
-	public int bytesRequired(ITupleReference tuple) {
-		int bytes = getNullFlagsBytes(tuple) + getFieldSlotsBytes(tuple);
-		for (int i = 0; i < tuple.getFieldCount(); i++) {
-			bytes += tuple.getFieldLength(i);
-		}
-		return bytes;
-	}
+    @Override
+    public int bytesRequired(ITupleReference tuple) {
+        int bytes = getNullFlagsBytes(tuple) + getFieldSlotsBytes(tuple);
+        for (int i = 0; i < tuple.getFieldCount(); i++) {
+            bytes += tuple.getFieldLength(i);
+        }
+        return bytes;
+    }
 
-	@Override
-	public int bytesRequired(ITupleReference tuple, int startField,
-			int numFields) {
-		int bytes = getNullFlagsBytes(numFields)
-				+ getFieldSlotsBytes(tuple, startField, numFields);
-		for (int i = startField; i < startField + numFields; i++) {
-			bytes += tuple.getFieldLength(i);
-		}
-		return bytes;
-	}
+    @Override
+    public int bytesRequired(ITupleReference tuple, int startField, int numFields) {
+        int bytes = getNullFlagsBytes(numFields) + getFieldSlotsBytes(tuple, startField, numFields);
+        for (int i = startField; i < startField + numFields; i++) {
+            bytes += tuple.getFieldLength(i);
+        }
+        return bytes;
+    }
 
-	@Override
-	public ITreeIndexTupleReference createTupleReference() {
-		return new TypeAwareTupleReference(typeTraits);
-	}
+    @Override
+    public ITreeIndexTupleReference createTupleReference() {
+        return new TypeAwareTupleReference(typeTraits);
+    }
 
-	@Override
-	public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf,
-			int targetOff) {
-		int runner = targetOff;
-		int nullFlagsBytes = getNullFlagsBytes(tuple);
-		// write null indicator bits
-		for (int i = 0; i < nullFlagsBytes; i++) {
-			targetBuf.put(runner++, (byte) 0);
-		}
+    @Override
+    public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf, int targetOff) {
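+        // delegate to the byte[] variant; assumes the target buffer is array-backed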
+        return writeTuple(tuple, targetBuf.array(), targetOff);
+    }
 
-		// write field slots for variable length fields
-		encDec.reset(targetBuf.array(), runner);
-		for (int i = 0; i < tuple.getFieldCount(); i++) {
-			if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
-				encDec.encode(tuple.getFieldLength(i));
-			}
-		}
-		runner = encDec.getPos();
+    @Override
+    public int writeTuple(ITupleReference tuple, byte[] targetBuf, int targetOff) {
+        int runner = targetOff;
+        int nullFlagsBytes = getNullFlagsBytes(tuple);
+        // write null indicator bits
+        for (int i = 0; i < nullFlagsBytes; i++) {
+            targetBuf[runner++] = (byte) 0;
+        }
 
-		// write data fields
-		for (int i = 0; i < tuple.getFieldCount(); i++) {
-			System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i),
-					targetBuf.array(), runner, tuple.getFieldLength(i));
-			runner += tuple.getFieldLength(i);
-		}
+        // write field slots for variable length fields
+        encDec.reset(targetBuf, runner);
+        for (int i = 0; i < tuple.getFieldCount(); i++) {
+            if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
+                encDec.encode(tuple.getFieldLength(i));
+            }
+        }
+        runner = encDec.getPos();
 
-		return runner - targetOff;
-	}
+        // write data fields
+        for (int i = 0; i < tuple.getFieldCount(); i++) {
+            int s = tuple.getFieldStart(i);
+            int l = tuple.getFieldLength(i);
+            System.arraycopy(tuple.getFieldData(i), s, targetBuf, runner, l);
+            runner += l;
+        }
 
-	@Override
-	public int writeTupleFields(ITupleReference tuple, int startField,
-			int numFields, ByteBuffer targetBuf, int targetOff) {
-		int runner = targetOff;
-		int nullFlagsBytes = getNullFlagsBytes(numFields);
-		// write null indicator bits
-		for (int i = 0; i < nullFlagsBytes; i++) {
-			targetBuf.put(runner++, (byte) 0);
-		}
+        return runner - targetOff;
+    }
+
+    @Override
+    public int writeTupleFields(ITupleReference tuple, int startField, int numFields, ByteBuffer targetBuf,
+            int targetOff) {
+        int runner = targetOff;
+        int nullFlagsBytes = getNullFlagsBytes(numFields);
+        // write null indicator bits
+        for (int i = 0; i < nullFlagsBytes; i++) {
+            targetBuf.put(runner++, (byte) 0);
+        }
 
-		// write field slots for variable length fields
-		encDec.reset(targetBuf.array(), runner);
-		for (int i = startField; i < startField + numFields; i++) {
-			if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
-				encDec.encode(tuple.getFieldLength(i));
-			}
-		}
-		runner = encDec.getPos();
+        // write field slots for variable length fields
+        encDec.reset(targetBuf.array(), runner);
+        for (int i = startField; i < startField + numFields; i++) {
+            if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
+                encDec.encode(tuple.getFieldLength(i));
+            }
+        }
+        runner = encDec.getPos();
 
-		for (int i = startField; i < startField + numFields; i++) {
-			System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i),
-					targetBuf.array(), runner, tuple.getFieldLength(i));
-			runner += tuple.getFieldLength(i);
-		}
+        for (int i = startField; i < startField + numFields; i++) {
+            System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner,
+                    tuple.getFieldLength(i));
+            runner += tuple.getFieldLength(i);
+        }
 
-		return runner - targetOff;
-	}
+        return runner - targetOff;
+    }
 
-	protected int getNullFlagsBytes(ITupleReference tuple) {
-		return (int) Math.ceil((double) tuple.getFieldCount() / 8.0);
-	}
+    protected int getNullFlagsBytes(ITupleReference tuple) {
+        return (int) Math.ceil((double) tuple.getFieldCount() / 8.0);
+    }
 
-	protected int getFieldSlotsBytes(ITupleReference tuple) {
-		int fieldSlotBytes = 0;
-		for (int i = 0; i < tuple.getFieldCount(); i++) {
-			if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
-				fieldSlotBytes += encDec.getBytesRequired(tuple
-						.getFieldLength(i));
-			}
-		}
-		return fieldSlotBytes;
-	}
+    protected int getFieldSlotsBytes(ITupleReference tuple) {
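+        // only variable-length fields get a length slot; fixed-length field sizes come from the type traits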
+        int fieldSlotBytes = 0;
+        for (int i = 0; i < tuple.getFieldCount(); i++) {
+            if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
+                fieldSlotBytes += encDec.getBytesRequired(tuple.getFieldLength(i));
+            }
+        }
+        return fieldSlotBytes;
+    }
 
-	protected int getNullFlagsBytes(int numFields) {
-		return (int) Math.ceil((double) numFields / 8.0);
-	}
+    protected int getNullFlagsBytes(int numFields) {
+        return (int) Math.ceil((double) numFields / 8.0);
+    }
 
-	protected int getFieldSlotsBytes(ITupleReference tuple, int startField,
-			int numFields) {
-		int fieldSlotBytes = 0;
-		for (int i = startField; i < startField + numFields; i++) {
-			if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
-				fieldSlotBytes += encDec.getBytesRequired(tuple
-						.getFieldLength(i));
-			}
-		}
-		return fieldSlotBytes;
-	}
+    protected int getFieldSlotsBytes(ITupleReference tuple, int startField, int numFields) {
+        int fieldSlotBytes = 0;
+        for (int i = startField; i < startField + numFields; i++) {
+            if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
+                fieldSlotBytes += encDec.getBytesRequired(tuple.getFieldLength(i));
+            }
+        }
+        return fieldSlotBytes;
+    }
 
-	public ITypeTrait[] getTypeTraits() {
-		return typeTraits;
-	}
+    public ITypeTrait[] getTypeTraits() {
+        return typeTraits;
+    }
 
-	public void setTypeTraits(ITypeTrait[] typeTraits) {
-		this.typeTraits = typeTraits;
-	}
+    public void setTypeTraits(ITypeTrait[] typeTraits) {
+        this.typeTraits = typeTraits;
+    }
 }
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexBufferCacheWarmup.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/TreeIndexBufferCacheWarmup.java
similarity index 97%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexBufferCacheWarmup.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/TreeIndexBufferCacheWarmup.java
index 8179d58..65ea8de 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexBufferCacheWarmup.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/TreeIndexBufferCacheWarmup.java
@@ -1,4 +1,4 @@
-package edu.uci.ics.hyracks.storage.am.common.utility;
+package edu.uci.ics.hyracks.storage.am.common.util;
 
 import java.util.ArrayList;
 import java.util.Random;
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexStats.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/TreeIndexStats.java
similarity index 98%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexStats.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/TreeIndexStats.java
index 2754743..d5d9b5d 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexStats.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/TreeIndexStats.java
@@ -1,4 +1,4 @@
-package edu.uci.ics.hyracks.storage.am.common.utility;
+package edu.uci.ics.hyracks.storage.am.common.util;
 
 import java.text.DecimalFormat;
 
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexStatsGatherer.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/TreeIndexStatsGatherer.java
similarity index 97%
rename from hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexStatsGatherer.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/TreeIndexStatsGatherer.java
index 9167732..eeacccd 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexStatsGatherer.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/TreeIndexStatsGatherer.java
@@ -1,4 +1,4 @@
-package edu.uci.ics.hyracks.storage.am.common.utility;
+package edu.uci.ics.hyracks.storage.am.common.util;
 
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/TreeIndexUtils.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/TreeIndexUtils.java
new file mode 100644
index 0000000..a1e493d
--- /dev/null
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/util/TreeIndexUtils.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.util;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+
+@SuppressWarnings("rawtypes")
+public class TreeIndexUtils {
+	public static String printFrameTuples(ITreeIndexFrame frame, ISerializerDeserializer[] fieldSerdes)
+			throws HyracksDataException {
+		StringBuilder strBuilder = new StringBuilder();
+		ITreeIndexTupleReference tuple = frame.createTupleReference();
+		for (int i = 0; i < frame.getTupleCount(); i++) {
+			tuple.resetByTupleIndex(frame, i);
+			String tupleString = TupleUtils.printTuple(tuple, fieldSerdes);
+			strBuilder.append(tupleString);
+			if (i != frame.getTupleCount() - 1) {
+				strBuilder.append(" | ");
+			}
+		}
+		return strBuilder.toString();
+	}
+}
diff --git a/hyracks-storage-am-invertedindex/.settings/org.eclipse.jdt.core.prefs b/hyracks-storage-am-invertedindex/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644
index 1e91fb3..0000000
--- a/hyracks-storage-am-invertedindex/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,6 +0,0 @@
-#Thu May 05 14:44:24 PDT 2011
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
-org.eclipse.jdt.core.compiler.compliance=1.6
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.6
diff --git a/hyracks-storage-am-invertedindex/.settings/org.maven.ide.eclipse.prefs b/hyracks-storage-am-invertedindex/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index e5e549a..0000000
--- a/hyracks-storage-am-invertedindex/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Thu May 05 14:44:18 PDT 2011
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/AbstractInvertedIndexOperatorDescriptor.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/AbstractInvertedIndexOperatorDescriptor.java
index 9db23ef..b93f064 100644
--- a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/AbstractInvertedIndexOperatorDescriptor.java
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/AbstractInvertedIndexOperatorDescriptor.java
@@ -96,14 +96,14 @@
     }
     
     @Override
-    public IPrimitiveValueProviderFactory[] getTreeIndexValueProviderFactories() {
-        return null;
-    }
-
-    @Override
     public ITypeTrait[] getTreeIndexTypeTraits() {
         return btreeTypeTraits;
     }
+    
+    @Override
+    public int getTreeIndexFieldCount() {
+        return btreeTypeTraits.length;
+    }
 
     @Override
     public ITreeIndexFrameFactory getTreeIndexInteriorFactory() {
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexOpHelper.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexOpHelper.java
index 1eb6757..684bb20 100644
--- a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexOpHelper.java
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexOpHelper.java
@@ -108,7 +108,7 @@
                         comparators[i] = opDesc.getInvIndexComparatorFactories()[i].createBinaryComparator();
                     }
 
-                    MultiComparator cmp = new MultiComparator(opDesc.getInvIndexTypeTraits(), comparators);
+                    MultiComparator cmp = new MultiComparator(comparators);
 
                     // assumes btree has already been registered
                     IFileSplitProvider btreeFileSplitProvider = opDesc.getTreeIndexFileSplitProvider();
@@ -123,7 +123,7 @@
                     int btreeFileId = fileMapProvider.lookupFileId(f);
                     BTree btree = (BTree) treeIndexRegistry.get(btreeFileId);
 
-                    invIndex = new InvertedIndex(bufferCache, btree, cmp);
+                    invIndex = new InvertedIndex(bufferCache, btree, opDesc.getInvIndexTypeTraits(), cmp);
                     invIndex.open(invIndexFileId);
                     invIndexRegistry.register(invIndexFileId, invIndex);
                 }
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/InvertedIndex.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/InvertedIndex.java
index 9eab110..75fdf2a 100644
--- a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/InvertedIndex.java
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/InvertedIndex.java
@@ -18,6 +18,7 @@
 import java.nio.ByteBuffer;
 
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
@@ -32,6 +33,7 @@
 import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
 import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
 import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListBuilder;
 import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListCursor;
@@ -53,14 +55,16 @@
     private int rootPageId = 0;
     private IBufferCache bufferCache;
     private int fileId;
+    private final ITypeTrait[] typeTraits;
     private final MultiComparator invListCmp;
     private final int numTokenFields;
     private final int numInvListKeys;
 
-    public InvertedIndex(IBufferCache bufferCache, BTree btree, MultiComparator invListCmp) {
+    public InvertedIndex(IBufferCache bufferCache, BTree btree, ITypeTrait[] typeTraits, MultiComparator invListCmp) {
         this.bufferCache = bufferCache;
         this.btree = btree;
         this.invListCmp = invListCmp;
+        this.typeTraits = typeTraits;
         this.numTokenFields = btree.getMultiComparator().getKeyFieldCount();
         this.numInvListKeys = invListCmp.getKeyFieldCount();
     }
@@ -74,7 +78,7 @@
     }
 
     public BulkLoadContext beginBulkLoad(IInvertedListBuilder invListBuilder, int hyracksFrameSize,
-            float btreeFillFactor) throws HyracksDataException {
+            float btreeFillFactor) throws HyracksDataException, TreeIndexException {
         BulkLoadContext ctx = new BulkLoadContext(invListBuilder, hyracksFrameSize, btreeFillFactor);
         ctx.init(rootPageId, fileId);
         return ctx;
@@ -197,7 +201,7 @@
         // reset tuple reference
         ctx.btreeFrameTupleReference.reset(ctx.btreeFrameTupleAccessor, 0);
 
-        btree.bulkLoadAddTuple(ctx.btreeBulkLoadCtx, ctx.btreeFrameTupleReference);
+        btree.bulkLoadAddTuple(ctx.btreeFrameTupleReference, ctx.btreeBulkLoadCtx);
     }
 
     public void endBulkLoad(BulkLoadContext ctx) throws HyracksDataException {
@@ -218,6 +222,10 @@
     public MultiComparator getInvListElementCmp() {
         return invListCmp;
     }
+    
+    public ITypeTrait[] getTypeTraits() {
+        return typeTraits;
+    }
 
     public BTree getBTree() {
         return btree;
@@ -235,8 +243,7 @@
         private int currentInvListStartPageId;
         private int currentInvListStartOffset;
         private final ByteArrayAccessibleOutputStream currentInvListTokenBaaos = new ByteArrayAccessibleOutputStream();
-        private final FixedSizeTupleReference currentInvListToken = new FixedSizeTupleReference(
-                invListCmp.getTypeTraits());
+        private final FixedSizeTupleReference currentInvListToken = new FixedSizeTupleReference(typeTraits);
 
         private int currentPageId;
         private ICachedPage currentPage;
@@ -247,7 +254,7 @@
             this.invListBuilder = invListBuilder;
             this.tokenCmp = btree.getMultiComparator();
             this.btreeTupleBuffer = ByteBuffer.allocate(hyracksFrameSize);
-            this.btreeTupleBuilder = new ArrayTupleBuilder(tokenCmp.getFieldCount());
+            this.btreeTupleBuilder = new ArrayTupleBuilder(btree.getFieldCount());
             this.btreeTupleAppender = new FrameTupleAppender(hyracksFrameSize);
             // TODO: serde never used, only need correct number of fields
             // tuple contains (token, start page, end page, start offset, num
@@ -260,10 +267,8 @@
             this.btreeFillFactor = btreeFillFactor;
         }
 
-        public void init(int startPageId, int fileId) throws HyracksDataException {
-            btreeBulkLoadCtx = btree.beginBulkLoad(BTree.DEFAULT_FILL_FACTOR,
-                    btree.getLeafFrameFactory().createFrame(), btree.getInteriorFrameFactory().createFrame(), btree
-                            .getFreePageManager().getMetaDataFrameFactory().createFrame());
+        public void init(int startPageId, int fileId) throws HyracksDataException, TreeIndexException {
+            btreeBulkLoadCtx = btree.beginBulkLoad(BTree.DEFAULT_FILL_FACTOR);
             currentPageId = startPageId;
             currentPage = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, currentPageId), true);
             currentPage.acquireWriteLatch();
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/TOccurrenceSearcher.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/TOccurrenceSearcher.java
index 7505ae0..0f88c0b 100644
--- a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/TOccurrenceSearcher.java
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/TOccurrenceSearcher.java
@@ -96,7 +96,7 @@
         interiorFrame = invIndex.getBTree().getInteriorFrameFactory().createFrame();
 
         btreeCursor = new BTreeRangeSearchCursor((IBTreeLeafFrame) leafFrame);
-        ITypeTrait[] invListFields = invIndex.getInvListElementCmp().getTypeTraits();
+        ITypeTrait[] invListFields = invIndex.getTypeTraits();
         invListFieldsWithCount = new TypeTrait[invListFields.length + 1];
         int tmp = 0;
         for (int i = 0; i < invListFields.length; i++) {
@@ -107,7 +107,7 @@
         invListFieldsWithCount[invListFields.length] = new TypeTrait(4);
         invListKeyLength = tmp;
 
-        btreeOpCtx = invIndex.getBTree().createOpContext(IndexOp.SEARCH, leafFrame, interiorFrame, null);
+        btreeOpCtx = invIndex.getBTree().createOpContext(IndexOp.SEARCH);
 
         resultFrameTupleApp = new FixedSizeFrameTupleAppender(ctx.getFrameSize(), invListFieldsWithCount);
         resultFrameTupleAcc = new FixedSizeFrameTupleAccessor(ctx.getFrameSize(), invListFieldsWithCount);
@@ -124,7 +124,7 @@
         // pre-create cursor objects
         for (int i = 0; i < cursorCacheSize; i++) {
             invListCursorCache.add(new FixedSizeElementInvertedListCursor(invIndex.getBufferCache(), invIndex
-                    .getInvListsFileId(), invIndex.getInvListElementCmp().getTypeTraits()));
+                    .getInvListsFileId(), invIndex.getTypeTraits()));
         }
 
         queryTokenAppender = new FrameTupleAppender(ctx.getFrameSize());
@@ -174,7 +174,7 @@
             int diff = numQueryTokens - invListCursorCache.size();
             for (int i = 0; i < diff; i++) {
                 invListCursorCache.add(new FixedSizeElementInvertedListCursor(invIndex.getBufferCache(), invIndex
-                        .getInvListsFileId(), invIndex.getInvListElementCmp().getTypeTraits()));
+                        .getInvListsFileId(), invIndex.getTypeTraits()));
             }
         }
 
diff --git a/hyracks-storage-am-rtree/.classpath b/hyracks-storage-am-rtree/.classpath
deleted file mode 100644
index 1f3c1ff..0000000
--- a/hyracks-storage-am-rtree/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/classes" path="src/main/java"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-storage-am-rtree/.project b/hyracks-storage-am-rtree/.project
deleted file mode 100644
index 4989318..0000000
--- a/hyracks-storage-am-rtree/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>hyracks-storage-am-rtree</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeFrame.java
index e3b9b96..dd57986 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeFrame.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeFrame.java
@@ -15,33 +15,25 @@
 
 package edu.uci.ics.hyracks.storage.am.rtree.api;
 
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
 import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.Rectangle;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.TupleEntryArrayList;
 
 public interface IRTreeFrame extends ITreeIndexFrame {
 
-	public ITreeIndexTupleReference createTupleReference();
+	public void computeMBR(ISplitKey splitKey);
 
-	public void computeMBR(ISplitKey splitKey, MultiComparator cmp);
+	public void delete(int tupleIndex, MultiComparator cmp);
 
-	public void insert(ITupleReference tuple, MultiComparator cmp,
-			int tupleIndex) throws Exception;
+	public long getPageNsn();
 
-	public void delete(int tupleIndex, MultiComparator cmp) throws Exception;
-
-	public int getPageNsn();
-
-	public void setPageNsn(int pageNsn);
+	public void setPageNsn(long pageNsn);
 
 	public int getRightPage();
 
 	public void setRightPage(int rightPage);
 
-	public void adjustMBR(ITreeIndexTupleReference[] tuples, MultiComparator cmp);
+	public void adjustMBR(ITreeIndexTupleReference[] tuples);
 
 }
\ No newline at end of file
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java
index 4c251d2..2b3065d 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java
@@ -23,7 +23,7 @@
 
 	public boolean findBestChild(ITupleReference tuple, MultiComparator cmp);
 
-	public int getBestChildPageId(MultiComparator cmp);
+	public int getBestChildPageId();
 
 	public int getChildPageIdIfIntersect(ITupleReference tuple, int tupleIndex,
 			MultiComparator cmp);
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeOpHelper.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeOpHelper.java
index cb7c9d9..8c83636 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeOpHelper.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeOpHelper.java
@@ -16,10 +16,8 @@
 package edu.uci.ics.hyracks.storage.am.rtree.dataflow;
 
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOperatorDescriptorHelper;
@@ -27,7 +25,6 @@
 import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexOpHelper;
 import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
 import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
 import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
 
@@ -45,20 +42,8 @@
 		IFreePageManager freePageManager = new LinkedListFreePageManager(
 				bufferCache, indexFileId, 0, metaDataFrameFactory);
 
-		return new RTree(bufferCache, freePageManager,
-				opDesc.getTreeIndexInteriorFactory(),
-				opDesc.getTreeIndexLeafFactory(), cmp);
-	}
-
-	public MultiComparator createMultiComparator(IBinaryComparator[] comparators)
-			throws HyracksDataException {
-		IPrimitiveValueProvider[] keyValueProvider = new IPrimitiveValueProvider[opDesc
-				.getTreeIndexValueProviderFactories().length];
-		for (int i = 0; i < opDesc.getTreeIndexComparatorFactories().length; i++) {
-			keyValueProvider[i] = opDesc.getTreeIndexValueProviderFactories()[i]
-					.createPrimitiveValueProvider();
-		}
-		return new MultiComparator(opDesc.getTreeIndexTypeTraits(),
-				comparators, keyValueProvider);
+		return new RTree(bufferCache, opDesc.getTreeIndexFieldCount(), cmp,
+				freePageManager, opDesc.getTreeIndexInteriorFactory(),
+				opDesc.getTreeIndexLeafFactory());
 	}
 }
\ No newline at end of file
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorDescriptor.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorDescriptor.java
index f5741c0..d67b2a6 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorDescriptor.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorDescriptor.java
@@ -24,7 +24,6 @@
 import edu.uci.ics.hyracks.api.job.IOperatorEnvironment;
 import edu.uci.ics.hyracks.api.job.JobSpecification;
 import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
@@ -46,11 +45,10 @@
 			ITreeIndexFrameFactory interiorFrameFactory,
 			ITreeIndexFrameFactory leafFrameFactory, ITypeTrait[] typeTraits,
 			IBinaryComparatorFactory[] comparatorFactories,
-			IPrimitiveValueProviderFactory[] valueProviderFactories,
 			int[] keyFields, ITreeIndexOpHelperFactory opHelperFactory) {
 		super(spec, 1, 1, recDesc, storageManager, treeIndexRegistryProvider,
 				fileSplitProvider, interiorFrameFactory, leafFrameFactory,
-				typeTraits, comparatorFactories, valueProviderFactories,
+				typeTraits, comparatorFactories,
 				opHelperFactory);
 		this.keyFields = keyFields;
 	}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorNodePushable.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorNodePushable.java
index a1e5162..4142ebf 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorNodePushable.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorNodePushable.java
@@ -29,7 +29,6 @@
 import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
 import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
 import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
@@ -101,23 +100,17 @@
                     keySearchComparators[i] = rtree.getCmp().getComparators()[i];
                 }
 
-                IPrimitiveValueProvider[] keyValueProvider = new IPrimitiveValueProvider[keySearchFields];
-                for (int i = 0; i < keySearchFields; i++) {
-                    keyValueProvider[i] = rtree.getCmp().getValueProviders()[i];
-                }
-
-                cmp = new MultiComparator(rtree.getCmp().getTypeTraits(), keySearchComparators, keyValueProvider);
+                cmp = new MultiComparator(keySearchComparators);
 
                 searchPred = new SearchPredicate(searchKey, cmp);
 
                 writeBuffer = treeIndexOpHelper.getHyracksTaskContext().allocateFrame();
-                tb = new ArrayTupleBuilder(rtree.getCmp().getFieldCount());
+                tb = new ArrayTupleBuilder(rtree.getFieldCount());
                 dos = tb.getDataOutput();
                 appender = new FrameTupleAppender(treeIndexOpHelper.getHyracksTaskContext().getFrameSize());
                 appender.reset(writeBuffer, true);
 
-                opCtx = rtree.createOpContext(IndexOp.SEARCH, treeIndexOpHelper.getLeafFrame(),
-                        treeIndexOpHelper.getInteriorFrame(), null);
+                opCtx = rtree.createOpContext(IndexOp.SEARCH);
             } catch (Exception e) {
                 writer.fail();
                 throw e;
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java
index e809884..0486496 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java
@@ -19,6 +19,7 @@
 
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
 import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
@@ -48,11 +49,12 @@
 	protected static final int nearMinimumOverlapFactor = 32;
 	private static final double doubleEpsilon = computeDoubleEpsilon();
 	private static final int numTuplesEntries = 100;
-
-	public RTreeNSMFrame(ITreeIndexTupleWriter tupleWriter, int keyFieldCount) {
-		super(tupleWriter, new UnorderedSlotManager());
-		this.tuples = new ITreeIndexTupleReference[keyFieldCount];
-		for (int i = 0; i < keyFieldCount; i++) {
+	protected final IPrimitiveValueProvider[] keyValueProviders;
+	
+	public RTreeNSMFrame(ITreeIndexTupleWriter tupleWriter, IPrimitiveValueProvider[] keyValueProviders) {
+		super(tupleWriter, new UnorderedSlotManager());		
+		this.tuples = new ITreeIndexTupleReference[keyValueProviders.length];
+		for (int i = 0; i < keyValueProviders.length; i++) {
 			this.tuples[i] = tupleWriter.createTupleReference();
 		}
 		cmpFrameTuple = tupleWriter.createTupleReference();
@@ -63,8 +65,9 @@
 				numTuplesEntries);
 		rec = new Rectangle[4];
 		for (int i = 0; i < 4; i++) {
-			rec[i] = new Rectangle(keyFieldCount / 2);
+			rec[i] = new Rectangle(keyValueProviders.length / 2);
 		}
+		this.keyValueProviders = keyValueProviders;
 	}
 
 	private static double computeDoubleEpsilon() {
@@ -92,13 +95,13 @@
 	}
 
 	@Override
-	public void setPageNsn(int pageNsn) {
-		buf.putInt(pageNsnOff, pageNsn);
+	public void setPageNsn(long pageNsn) {
+		buf.putLong(pageNsnOff, pageNsn);
 	}
 
 	@Override
-	public int getPageNsn() {
-		return buf.getInt(pageNsnOff);
+	public long getPageNsn() {
+		return buf.getLong(pageNsnOff);
 	}
 
 	@Override
@@ -141,22 +144,21 @@
 	}
 
 	public void generateDist(ITupleReference tuple,
-			TupleEntryArrayList entries, Rectangle rec, int start, int end,
-			MultiComparator cmp) {
+			TupleEntryArrayList entries, Rectangle rec, int start, int end) {
 		int j = 0;
 		while (entries.get(j).getTupleIndex() == -1) {
 			j++;
 		}
 		frameTuple.resetByTupleIndex(this, entries.get(j).getTupleIndex());
-		rec.set(frameTuple, cmp);
+		rec.set(frameTuple, keyValueProviders);
 		for (int i = start; i < end; ++i) {
 			if (i != j) {
 				if (entries.get(i).getTupleIndex() != -1) {
 					frameTuple.resetByTupleIndex(this, entries.get(i)
 							.getTupleIndex());
-					rec.enlarge(frameTuple, cmp);
+					rec.enlarge(frameTuple, keyValueProviders);
 				} else {
-					rec.enlarge(tuple, cmp);
+					rec.enlarge(tuple, keyValueProviders);
 				}
 			}
 		}
@@ -167,28 +169,20 @@
 		return tupleWriter.createTupleReference();
 	}
 
-	public void adjustMBRImpl(ITreeIndexTupleReference[] tuples,
-			MultiComparator cmp) {
-		int maxFieldPos = cmp.getKeyFieldCount() / 2;
+	public void adjustMBRImpl(ITreeIndexTupleReference[] tuples) {
+		int maxFieldPos = keyValueProviders.length / 2;
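+		// the first half of the key fields holds the MBR's min coordinates, the second half the max coordinates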
 		for (int i = 1; i < getTupleCount(); i++) {
 			frameTuple.resetByTupleIndex(this, i);
 			for (int j = 0; j < maxFieldPos; j++) {
 				int k = maxFieldPos + j;
-				int c = cmp.getComparators()[j].compare(
-						frameTuple.getFieldData(j),
-						frameTuple.getFieldStart(j),
-						frameTuple.getFieldLength(j),
-						tuples[j].getFieldData(j), tuples[j].getFieldStart(j),
-						tuples[j].getFieldLength(j));
-				if (c < 0) {
+				double valA = keyValueProviders[j].getValue(frameTuple.getFieldData(j), frameTuple.getFieldStart(j));
+				double valB = keyValueProviders[j].getValue(tuples[j].getFieldData(j), tuples[j].getFieldStart(j));
+				if (valA < valB) {
 					tuples[j].resetByTupleIndex(this, i);
 				}
-				c = cmp.getComparators()[k].compare(frameTuple.getFieldData(k),
-						frameTuple.getFieldStart(k),
-						frameTuple.getFieldLength(k),
-						tuples[k].getFieldData(k), tuples[k].getFieldStart(k),
-						tuples[k].getFieldLength(k));
-				if (c > 0) {
+				valA = keyValueProviders[k].getValue(frameTuple.getFieldData(k), frameTuple.getFieldStart(k));
+				valB = keyValueProviders[k].getValue(tuples[k].getFieldData(k), tuples[k].getFieldStart(k));
+				if (valA > valB) {
 					tuples[k].resetByTupleIndex(this, i);
 				}
 			}
@@ -196,18 +190,17 @@
 	}
 
 	@Override
-	public void computeMBR(ISplitKey splitKey, MultiComparator cmp) {
+	public void computeMBR(ISplitKey splitKey) {
 		RTreeSplitKey rTreeSplitKey = ((RTreeSplitKey) splitKey);
 		RTreeTypeAwareTupleWriter rTreeTupleWriterLeftFrame = ((RTreeTypeAwareTupleWriter) tupleWriter);
-		frameTuple.setFieldCount(cmp.getFieldCount());
 
 		int tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
 		frameTuple.resetByTupleOffset(buf, tupleOff);
 		int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0,
-				cmp.getKeyFieldCount());
+				keyValueProviders.length);
 
 		splitKey.initData(splitKeySize);
-		this.adjustMBR(tuples, cmp);
+		this.adjustMBR(tuples);
 		rTreeTupleWriterLeftFrame.writeTupleFields(tuples, 0,
 				rTreeSplitKey.getLeftPageBuffer(), 0);
 		rTreeSplitKey.getLeftTuple().resetByTupleOffset(
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java
index 38d2a46..96d739c 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java
@@ -15,20 +15,19 @@
 
 package edu.uci.ics.hyracks.storage.am.rtree.frames;
 
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
 import java.util.ArrayList;
 import java.util.Collections;
 
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
 import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
 import edu.uci.ics.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.SlotOffTupleOff;
@@ -40,685 +39,579 @@
 import edu.uci.ics.hyracks.storage.am.rtree.impls.UnorderedSlotManager;
 import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriter;
 
-public class RTreeNSMInteriorFrame extends RTreeNSMFrame implements
-		IRTreeInteriorFrame {
+public class RTreeNSMInteriorFrame extends RTreeNSMFrame implements IRTreeInteriorFrame {
 
-	private static final int childPtrSize = 4;
+    private static final int childPtrSize = 4;
+    private static IBinaryComparator childPtrCmp = IntegerBinaryComparatorFactory.INSTANCE
+            .createBinaryComparator();
+
+    public RTreeNSMInteriorFrame(ITreeIndexTupleWriter tupleWriter, IPrimitiveValueProvider[] keyValueProviders) {
+        super(tupleWriter, keyValueProviders);
+        frameTuple.setFieldCount(keyValueProviders.length);
+    }
 
-	public RTreeNSMInteriorFrame(ITreeIndexTupleWriter tupleWriter,
-			int keyFieldCount) {
-		super(tupleWriter, keyFieldCount);
-	}
+    @Override
+    public boolean findBestChild(ITupleReference tuple, MultiComparator cmp) {
+        cmpFrameTuple.setFieldCount(cmp.getKeyFieldCount());
+        frameTuple.setFieldCount(cmp.getKeyFieldCount());
 
-	@Override
-	public String printKeys(MultiComparator cmp,
-			ISerializerDeserializer[] fields) throws HyracksDataException {
-		StringBuilder strBuilder = new StringBuilder();
-		int tupleCount = buf.getInt(tupleCountOff);
-		frameTuple.setFieldCount(cmp.getKeyFieldCount());
-		for (int i = 0; i < tupleCount; i++) {
-			frameTuple.resetByTupleIndex(this, i);
-			for (int j = 0; j < cmp.getKeyFieldCount(); j++) {
-				ByteArrayInputStream inStream = new ByteArrayInputStream(
-						frameTuple.getFieldData(j),
-						frameTuple.getFieldStart(j),
-						frameTuple.getFieldLength(j));
-				DataInput dataIn = new DataInputStream(inStream);
-				Object o = fields[j].deserialize(dataIn);
-				strBuilder.append(o.toString() + " ");
-			}
-			strBuilder.append(" | ");
-		}
-		strBuilder.append("\n");
-		return strBuilder.toString();
-	}
+        int bestChild = 0;
+        double minEnlargedArea = Double.MAX_VALUE;
 
-	@Override
-	public boolean findBestChild(ITupleReference tuple, MultiComparator cmp) {
-		cmpFrameTuple.setFieldCount(cmp.getKeyFieldCount());
-		frameTuple.setFieldCount(cmp.getKeyFieldCount());
+        // the children pointers in the node point to leaves
+        if (getLevel() == 1) {
+            // find least overlap enlargement, use minimum enlarged area to
+            // break tie, if tie still exists use minimum area to break it
+            for (int i = 0; i < getTupleCount(); ++i) {
+                frameTuple.resetByTupleIndex(this, i);
+                double enlargedArea = enlargedArea(frameTuple, tuple, cmp);
+                tupleEntries1.add(i, enlargedArea);
+                if (enlargedArea < minEnlargedArea) {
+                    minEnlargedArea = enlargedArea;
+                    bestChild = i;
+                }
+            }
+            if (minEnlargedArea < RTreeNSMFrame.doubleEpsilon() || minEnlargedArea > RTreeNSMFrame.doubleEpsilon()) {
+                minEnlargedArea = Double.MAX_VALUE;
+                int k;
+                if (getTupleCount() > nearMinimumOverlapFactor) {
+                    // sort the entries based on their area enlargement needed
+                    // to include the object
+                    tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount());
+                    k = nearMinimumOverlapFactor;
+                } else {
+                    k = getTupleCount();
+                }
 
-		int bestChild = 0;
-		double minEnlargedArea = Double.MAX_VALUE;
+                double minOverlap = Double.MAX_VALUE;
+                int id = 0;
+                for (int i = 0; i < k; ++i) {
+                    double difference = 0.0;
+                    for (int j = 0; j < getTupleCount(); ++j) {
+                        frameTuple.resetByTupleIndex(this, j);
+                        cmpFrameTuple.resetByTupleIndex(this, tupleEntries1.get(i).getTupleIndex());
 
-		// the children pointers in the node point to leaves
-		if (getLevel() == 1) {
-			// find least overlap enlargement, use minimum enlarged area to
-			// break tie, if tie still exists use minimum area to break it
-			for (int i = 0; i < getTupleCount(); ++i) {
-				frameTuple.resetByTupleIndex(this, i);
-				double enlargedArea = enlargedArea(frameTuple, tuple, cmp);
-				tupleEntries1.add(i, enlargedArea);
-				if (enlargedArea < minEnlargedArea) {
-					minEnlargedArea = enlargedArea;
-					bestChild = i;
-				}
-			}
-			if (minEnlargedArea < RTreeNSMFrame.doubleEpsilon()
-					|| minEnlargedArea > RTreeNSMFrame.doubleEpsilon()) {
-				minEnlargedArea = Double.MAX_VALUE;
-				int k;
-				if (getTupleCount() > nearMinimumOverlapFactor) {
-					// sort the entries based on their area enlargement needed
-					// to include the object
-					tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount());
-					k = nearMinimumOverlapFactor;
-				} else {
-					k = getTupleCount();
-				}
+                        int c = pointerCmp(frameTuple, cmpFrameTuple, cmp);
+                        if (c != 0) {
+                            double intersection = overlappedArea(frameTuple, tuple, cmpFrameTuple, cmp);
+                            if (intersection != 0.0) {
+                                difference += intersection - overlappedArea(frameTuple, null, cmpFrameTuple, cmp);
+                            }
+                        } else {
+                            id = j;
+                        }
+                    }
 
-				double minOverlap = Double.MAX_VALUE;
-				int id = 0;
-				for (int i = 0; i < k; ++i) {
-					double difference = 0.0;
-					for (int j = 0; j < getTupleCount(); ++j) {
-						frameTuple.resetByTupleIndex(this, j);
-						cmpFrameTuple.resetByTupleIndex(this, tupleEntries1
-								.get(i).getTupleIndex());
+                    double enlargedArea = enlargedArea(cmpFrameTuple, tuple, cmp);
+                    if (difference < minOverlap) {
+                        minOverlap = difference;
+                        minEnlargedArea = enlargedArea;
+                        bestChild = id;
+                    } else if (difference == minOverlap) {
+                        if (enlargedArea < minEnlargedArea) {
+                            minEnlargedArea = enlargedArea;
+                            bestChild = id;
+                        } else if (enlargedArea == minEnlargedArea) {
+                            double area = area(cmpFrameTuple, cmp);
+                            frameTuple.resetByTupleIndex(this, bestChild);
+                            double minArea = area(frameTuple, cmp);
+                            if (area < minArea) {
+                                bestChild = id;
+                            }
+                        }
+                    }
+                }
+            }
+        } else { // find minimum enlarged area, use minimum area to break tie
+            for (int i = 0; i < getTupleCount(); i++) {
+                frameTuple.resetByTupleIndex(this, i);
+                double enlargedArea = enlargedArea(frameTuple, tuple, cmp);
+                if (enlargedArea < minEnlargedArea) {
+                    minEnlargedArea = enlargedArea;
+                    bestChild = i;
+                } else if (enlargedArea == minEnlargedArea) {
+                    double area = area(frameTuple, cmp);
+                    frameTuple.resetByTupleIndex(this, bestChild);
+                    double minArea = area(frameTuple, cmp);
+                    if (area < minArea) {
+                        bestChild = i;
+                    }
+                }
+            }
+        }
+        tupleEntries1.clear();
 
-						int c = pointerCmp(frameTuple, cmpFrameTuple, cmp);
-						if (c != 0) {
-							double intersection = overlappedArea(frameTuple,
-									tuple, cmpFrameTuple, cmp);
-							if (intersection != 0.0) {
-								difference += intersection
-										- overlappedArea(frameTuple, null,
-												cmpFrameTuple, cmp);
-							}
-						} else {
-							id = j;
-						}
-					}
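+        // position frameTuple on the chosen child and report whether its MBR must be
+        // enlarged to cover the new tuple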
+        frameTuple.resetByTupleIndex(this, bestChild);
+        return minEnlargedArea > 0.0;
+    }
 
-					double enlargedArea = enlargedArea(cmpFrameTuple, tuple,
-							cmp);
-					if (difference < minOverlap) {
-						minOverlap = difference;
-						minEnlargedArea = enlargedArea;
-						bestChild = id;
-					} else if (difference == minOverlap) {
-						if (enlargedArea < minEnlargedArea) {
-							minEnlargedArea = enlargedArea;
-							bestChild = id;
-						} else if (enlargedArea == minEnlargedArea) {
-							double area = area(cmpFrameTuple, cmp);
-							frameTuple.resetByTupleIndex(this, bestChild);
-							double minArea = area(frameTuple, cmp);
-							if (area < minArea) {
-								bestChild = id;
-							}
-						}
-					}
-				}
-			}
-		} else { // find minimum enlarged area, use minimum area to break tie
-			for (int i = 0; i < getTupleCount(); i++) {
-				frameTuple.resetByTupleIndex(this, i);
-				double enlargedArea = enlargedArea(frameTuple, tuple, cmp);
-				if (enlargedArea < minEnlargedArea) {
-					minEnlargedArea = enlargedArea;
-					bestChild = i;
-				} else if (enlargedArea == minEnlargedArea) {
-					double area = area(frameTuple, cmp);
-					frameTuple.resetByTupleIndex(this, bestChild);
-					double minArea = area(frameTuple, cmp);
-					if (area < minArea) {
-						bestChild = i;
-					}
-				}
-			}
-		}
-		tupleEntries1.clear();
+    @Override
+    public int getBestChildPageId() {
+        return buf.getInt(getChildPointerOff(frameTuple));
+    }
 
-		frameTuple.resetByTupleIndex(this, bestChild);
-		if (minEnlargedArea > 0.0) {
-			return true;
-		} else {
-			return false;
-		}
-	}
+    @Override
+    public int findTupleByPointer(ITupleReference tuple, MultiComparator cmp) {
+        frameTuple.setFieldCount(cmp.getKeyFieldCount());
+        for (int i = 0; i < getTupleCount(); i++) {
+            frameTuple.resetByTupleIndex(this, i);
+            int c = pointerCmp(frameTuple, tuple, cmp);
+            if (c == 0) {
+                return i;
+            }
+        }
+        return -1;
+    }
 
-	@Override
-	public int getBestChildPageId(MultiComparator cmp) {
-		return buf.getInt(getChildPointerOff(frameTuple, cmp));
-	}
+    @Override
+    public int getChildPageIdIfIntersect(ITupleReference tuple, int tupleIndex, MultiComparator cmp) {
+        frameTuple.setFieldCount(cmp.getKeyFieldCount());
+        frameTuple.resetByTupleIndex(this, tupleIndex);
+        int maxFieldPos = cmp.getKeyFieldCount() / 2;
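+        // the first half of the key fields holds the lower bounds and the second half the
+        // upper bounds; the MBRs intersect iff low <= high holds both ways in every dimension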
+        for (int i = 0; i < maxFieldPos; i++) {
+            int j = maxFieldPos + i;
+            int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
+                    tuple.getFieldLength(i), frameTuple.getFieldData(j), frameTuple.getFieldStart(j),
+                    frameTuple.getFieldLength(j));
+            if (c > 0) {
+                return -1;
+            }
+            c = cmp.getComparators()[i].compare(tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j),
+                    frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
+            if (c < 0) {
+                return -1;
+            }
+        }
+        return buf.getInt(getChildPointerOff(frameTuple));
+    }
 
-	@Override
-	public int findTupleByPointer(ITupleReference tuple, MultiComparator cmp) {
-		frameTuple.setFieldCount(cmp.getKeyFieldCount());
-		for (int i = 0; i < getTupleCount(); i++) {
-			frameTuple.resetByTupleIndex(this, i);
-			int c = pointerCmp(frameTuple, tuple, cmp);
-			if (c == 0) {
-				return i;
-			}
-		}
-		return -1;
-	}
+    @Override
+    public int findTupleByPointer(ITupleReference tuple, PathList traverseList, int parentIndex, MultiComparator cmp) {
+        frameTuple.setFieldCount(cmp.getKeyFieldCount());
+        for (int i = 0; i < getTupleCount(); i++) {
+            frameTuple.resetByTupleIndex(this, i);
 
-	@Override
-	public int getChildPageIdIfIntersect(ITupleReference tuple, int tupleIndex,
-			MultiComparator cmp) {
-		frameTuple.setFieldCount(cmp.getKeyFieldCount());
-		frameTuple.resetByTupleIndex(this, tupleIndex);
-		int maxFieldPos = cmp.getKeyFieldCount() / 2;
-		for (int i = 0; i < maxFieldPos; i++) {
-			int j = maxFieldPos + i;
-			int c = cmp.getComparators()[i].compare(tuple.getFieldData(i),
-					tuple.getFieldStart(i), tuple.getFieldLength(i),
-					frameTuple.getFieldData(j), frameTuple.getFieldStart(j),
-					frameTuple.getFieldLength(j));
-			if (c > 0) {
-				return -1;
-			}
-			c = cmp.getComparators()[i].compare(tuple.getFieldData(j),
-					tuple.getFieldStart(j), tuple.getFieldLength(j),
-					frameTuple.getFieldData(i), frameTuple.getFieldStart(i),
-					frameTuple.getFieldLength(i));
-			if (c < 0) {
-				return -1;
-			}
-		}
-		return buf.getInt(getChildPointerOff(frameTuple, cmp));
-	}
+            int c = pointerCmp(frameTuple, tuple, cmp);
+            if (c == 0) {
+                return i;
+            } else {
+                int pageId = IntegerSerializerDeserializer.getInt(frameTuple.getFieldData(cmp.getKeyFieldCount() - 1),
+                        getChildPointerOff(frameTuple));
+                traverseList.add(pageId, -1, parentIndex);
+            }
+        }
+        return -1;
+    }
 
-	@Override
-	public int findTupleByPointer(ITupleReference tuple, PathList traverseList,
-			int parentIndex, MultiComparator cmp) {
-		frameTuple.setFieldCount(cmp.getKeyFieldCount());
-		for (int i = 0; i < getTupleCount(); i++) {
-			frameTuple.resetByTupleIndex(this, i);
+    @Override
+    public boolean compact() {
+        resetSpaceParams();
 
-			int c = pointerCmp(frameTuple, tuple, cmp);
-			if (c == 0) {
-				return i;
-			} else {
-				int pageId = IntegerSerializerDeserializer.getInt(
-						frameTuple.getFieldData(cmp.getKeyFieldCount() - 1),
-						getChildPointerOff(frameTuple, cmp));
-				traverseList.add(pageId, -1, parentIndex);
-			}
-		}
-		return -1;
-	}
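+        // defragment the page: visit the live tuples in physical order, copy each one to
+        // the front of the free area, and point its slot at the new offset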
+        int tupleCount = buf.getInt(tupleCountOff);
+        int freeSpace = buf.getInt(freeSpaceOff);
 
-	@Override
-	public boolean compact(MultiComparator cmp) {
-		resetSpaceParams();
-		frameTuple.setFieldCount(cmp.getKeyFieldCount());
+        ArrayList<SlotOffTupleOff> sortedTupleOffs = new ArrayList<SlotOffTupleOff>();
+        sortedTupleOffs.ensureCapacity(tupleCount);
+        for (int i = 0; i < tupleCount; i++) {
+            int slotOff = slotManager.getSlotOff(i);
+            int tupleOff = slotManager.getTupleOff(slotOff);
+            sortedTupleOffs.add(new SlotOffTupleOff(i, slotOff, tupleOff));
+        }
+        Collections.sort(sortedTupleOffs);
 
-		int tupleCount = buf.getInt(tupleCountOff);
-		int freeSpace = buf.getInt(freeSpaceOff);
+        for (int i = 0; i < sortedTupleOffs.size(); i++) {
+            int tupleOff = sortedTupleOffs.get(i).tupleOff;
+            frameTuple.resetByTupleOffset(buf, tupleOff);
 
-		ArrayList<SlotOffTupleOff> sortedTupleOffs = new ArrayList<SlotOffTupleOff>();
-		sortedTupleOffs.ensureCapacity(tupleCount);
-		for (int i = 0; i < tupleCount; i++) {
-			int slotOff = slotManager.getSlotOff(i);
-			int tupleOff = slotManager.getTupleOff(slotOff);
-			sortedTupleOffs.add(new SlotOffTupleOff(i, slotOff, tupleOff));
-		}
-		Collections.sort(sortedTupleOffs);
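+            // the child pointer sits right after the last key field, so copy it along with the keys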
+            int tupleEndOff = frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
+                    + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1);
+            int tupleLength = tupleEndOff - tupleOff + childPtrSize;
+            System.arraycopy(buf.array(), tupleOff, buf.array(), freeSpace, tupleLength);
 
-		for (int i = 0; i < sortedTupleOffs.size(); i++) {
-			int tupleOff = sortedTupleOffs.get(i).tupleOff;
-			frameTuple.resetByTupleOffset(buf, tupleOff);
+            slotManager.setSlot(sortedTupleOffs.get(i).slotOff, freeSpace);
+            freeSpace += tupleLength;
+        }
 
-			int tupleEndOff = frameTuple.getFieldStart(frameTuple
-					.getFieldCount() - 1)
-					+ frameTuple.getFieldLength(frameTuple.getFieldCount() - 1);
-			int tupleLength = tupleEndOff - tupleOff + childPtrSize;
-			System.arraycopy(buf.array(), tupleOff, buf.array(), freeSpace,
-					tupleLength);
+        buf.putInt(freeSpaceOff, freeSpace);
+        buf.putInt(totalFreeSpaceOff, buf.capacity() - freeSpace - tupleCount * slotManager.getSlotSize());
 
-			slotManager.setSlot(sortedTupleOffs.get(i).slotOff, freeSpace);
-			freeSpace += tupleLength;
-		}
+        return false;
+    }
 
-		buf.putInt(freeSpaceOff, freeSpace);
-		buf.putInt(totalFreeSpaceOff, buf.capacity() - freeSpace - tupleCount
-				* slotManager.getSlotSize());
+    @Override
+    public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple) {
+        int bytesRequired = tupleWriter.bytesRequired(tuple) + childPtrSize; // for the child pointer
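+        // prefer the contiguous region at the end of the data area; if only the total free
+        // space (including holes left by deletes) suffices, the caller is expected to compact first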
+        if (bytesRequired + slotManager.getSlotSize() <= buf.capacity() - buf.getInt(freeSpaceOff)
+                - (buf.getInt(tupleCountOff) * slotManager.getSlotSize()))
+            return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
+        else if (bytesRequired + slotManager.getSlotSize() <= buf.getInt(totalFreeSpaceOff))
+            return FrameOpSpaceStatus.SUFFICIENT_SPACE;
+        else
+            return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
+    }
 
-		return false;
-	}
+    @Override
+    public void adjustKey(ITupleReference tuple, int tupleIndex, MultiComparator cmp) {
+        frameTuple.setFieldCount(cmp.getKeyFieldCount());
+        if (tupleIndex == -1) {
+            tupleIndex = findTupleByPointer(tuple, cmp);
+        }
+        if (tupleIndex != -1) {
+            tupleWriter.writeTuple(tuple, buf.array(), getTupleOffset(tupleIndex));
+        }
+    }
 
-	@Override
-	public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple,
-			MultiComparator cmp) {
-		int bytesRequired = tupleWriter.bytesRequired(tuple) + childPtrSize; // for
-																				// the
-																				// child
-																				// pointer
-		if (bytesRequired + slotManager.getSlotSize() <= buf.capacity()
-				- buf.getInt(freeSpaceOff)
-				- (buf.getInt(tupleCountOff) * slotManager.getSlotSize()))
-			return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
-		else if (bytesRequired + slotManager.getSlotSize() <= buf
-				.getInt(totalFreeSpaceOff))
-			return FrameOpSpaceStatus.SUFFICIENT_SPACE;
-		else
-			return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
-	}
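+    // compares only the raw child pointers stored immediately after the key fields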
+    private int pointerCmp(ITupleReference tupleA, ITupleReference tupleB, MultiComparator cmp) {
+        return childPtrCmp.compare(tupleA.getFieldData(cmp.getKeyFieldCount() - 1),
+                getChildPointerOff(tupleA), childPtrSize, tupleB.getFieldData(cmp.getKeyFieldCount() - 1),
+                getChildPointerOff(tupleB), childPtrSize);
+    }
+
+    @Override
+    public void split(ITreeIndexFrame rightFrame, ITupleReference tuple, ISplitKey splitKey) throws TreeIndexException {
+        RTreeSplitKey rTreeSplitKey = ((RTreeSplitKey) splitKey);
+        RTreeTypeAwareTupleWriter rTreeTupleWriterLeftFrame = ((RTreeTypeAwareTupleWriter) tupleWriter);
+        RTreeTypeAwareTupleWriter rTreeTupleWriterRightFrame = ((RTreeTypeAwareTupleWriter) rightFrame.getTupleWriter());
 
-	@Override
-	public void adjustKey(ITupleReference tuple, int tupleIndex,
-			MultiComparator cmp) {
-		frameTuple.setFieldCount(cmp.getKeyFieldCount());
-		if (tupleIndex == -1) {
-			tupleIndex = findTupleByPointer(tuple, cmp);
-		}
-		if (tupleIndex != -1) {
-			tupleWriter.writeTuple(tuple, buf, getTupleOffset(tupleIndex));
-		}
-	}
+        // calculations are based on the R*-tree paper
+        int m = (int) Math.floor((getTupleCount() + 1) * splitFactor);
+        int splitDistribution = getTupleCount() - (2 * m) + 2;
 
-	private int pointerCmp(ITupleReference tupleA, ITupleReference tupleB,
-			MultiComparator cmp) {
-		return cmp.getIntCmp().compare(
-				tupleA.getFieldData(cmp.getKeyFieldCount() - 1),
-				getChildPointerOff(tupleA, cmp), childPtrSize,
-				tupleB.getFieldData(cmp.getKeyFieldCount() - 1),
-				getChildPointerOff(tupleB, cmp), childPtrSize);
-	}
+        // to calculate the minimum margin in order to pick the split axis
+        double minMargin = Double.MAX_VALUE;
+        int splitAxis = 0, sortOrder = 0;
 
-	@Override
-	public int split(ITreeIndexFrame rightFrame, ITupleReference tuple,
-			MultiComparator cmp, ISplitKey splitKey) throws Exception {
+        int maxFieldPos = keyValueProviders.length / 2;
+        for (int i = 0; i < maxFieldPos; i++) {
+            int j = maxFieldPos + i;
+            for (int k = 0; k < getTupleCount(); ++k) {
 
-		rightFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
-		frameTuple.setFieldCount(cmp.getKeyFieldCount());
+                frameTuple.resetByTupleIndex(this, k);
+                double LowerKey = keyValueProviders[i].getValue(frameTuple.getFieldData(i),
+                        frameTuple.getFieldStart(i));
+                double UpperKey = keyValueProviders[j].getValue(frameTuple.getFieldData(j),
+                        frameTuple.getFieldStart(j));
 
-		RTreeSplitKey rTreeSplitKey = ((RTreeSplitKey) splitKey);
-		RTreeTypeAwareTupleWriter rTreeTupleWriterLeftFrame = ((RTreeTypeAwareTupleWriter) tupleWriter);
-		RTreeTypeAwareTupleWriter rTreeTupleWriterRightFrame = ((RTreeTypeAwareTupleWriter) rightFrame
-				.getTupleWriter());
+                tupleEntries1.add(k, LowerKey);
+                tupleEntries2.add(k, UpperKey);
+            }
+            double LowerKey = keyValueProviders[i].getValue(tuple.getFieldData(i), tuple.getFieldStart(i));
+            double UpperKey = keyValueProviders[j].getValue(tuple.getFieldData(j), tuple.getFieldStart(j));
 
-		// calculations are based on the R*-tree paper
-		int m = (int) Math.floor((getTupleCount() + 1) * splitFactor);
-		int splitDistribution = getTupleCount() - (2 * m) + 2;
+            tupleEntries1.add(-1, LowerKey);
+            tupleEntries2.add(-1, UpperKey);
 
-		// to calculate the minimum margin in order to pick the split axis
-		double minMargin = Double.MAX_VALUE;
-		int splitAxis = 0, sortOrder = 0;
+            tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
+            tupleEntries2.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
 
-		int maxFieldPos = cmp.getKeyFieldCount() / 2;
-		for (int i = 0; i < maxFieldPos; i++) {
-			int j = maxFieldPos + i;
-			for (int k = 0; k < getTupleCount(); ++k) {
+            double lowerMargin = 0.0, upperMargin = 0.0;
+            // generate distribution
+            for (int k = 1; k <= splitDistribution; ++k) {
+                int d = m - 1 + k;
 
-				frameTuple.resetByTupleIndex(this, k);
-				double LowerKey = cmp.getValueProviders()[i]
-						.getValue(frameTuple.getFieldData(i),
-								frameTuple.getFieldStart(i));
-				double UpperKey = cmp.getValueProviders()[j]
-						.getValue(frameTuple.getFieldData(j),
-								frameTuple.getFieldStart(j));
+                generateDist(tuple, tupleEntries1, rec[0], 0, d);
+                generateDist(tuple, tupleEntries2, rec[1], 0, d);
+                generateDist(tuple, tupleEntries1, rec[2], d, getTupleCount() + 1);
+                generateDist(tuple, tupleEntries2, rec[3], d, getTupleCount() + 1);
 
-				tupleEntries1.add(k, LowerKey);
-				tupleEntries2.add(k, UpperKey);
-			}
-			double LowerKey = cmp.getValueProviders()[i].getValue(
-					tuple.getFieldData(i), tuple.getFieldStart(i));
-			double UpperKey = cmp.getValueProviders()[j].getValue(
-					tuple.getFieldData(j), tuple.getFieldStart(j));
+                // calculate the margin of the distributions
+                lowerMargin += rec[0].margin() + rec[2].margin();
+                upperMargin += rec[1].margin() + rec[3].margin();
+            }
+            double margin = Math.min(lowerMargin, upperMargin);
 
-			tupleEntries1.add(-1, LowerKey);
-			tupleEntries2.add(-1, UpperKey);
+            // store minimum margin as split axis
+            if (margin < minMargin) {
+                minMargin = margin;
+                splitAxis = i;
+                sortOrder = (lowerMargin < upperMargin) ? 0 : 2;
+            }
 
-			tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
-			tupleEntries2.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
+            tupleEntries1.clear();
+            tupleEntries2.clear();
+        }
 
-			double lowerMargin = 0.0, upperMargin = 0.0;
-			// generate distribution
-			for (int k = 1; k <= splitDistribution; ++k) {
-				int d = m - 1 + k;
-
-				generateDist(tuple, tupleEntries1, rec[0], 0, d, cmp);
-				generateDist(tuple, tupleEntries2, rec[1], 0, d, cmp);
-				generateDist(tuple, tupleEntries1, rec[2], d,
-						getTupleCount() + 1, cmp);
-				generateDist(tuple, tupleEntries2, rec[3], d,
-						getTupleCount() + 1, cmp);
-
-				// calculate the margin of the distributions
-				lowerMargin += rec[0].margin() + rec[2].margin();
-				upperMargin += rec[1].margin() + rec[3].margin();
-			}
-			double margin = Math.min(lowerMargin, upperMargin);
-
-			// store minimum margin as split axis
-			if (margin < minMargin) {
-				minMargin = margin;
-				splitAxis = i;
-				sortOrder = (lowerMargin < upperMargin) ? 0 : 2;
-			}
-
-			tupleEntries1.clear();
-			tupleEntries2.clear();
-		}
-
-		for (int i = 0; i < getTupleCount(); ++i) {
-			frameTuple.resetByTupleIndex(this, i);
-			double key = cmp.getValueProviders()[splitAxis + sortOrder]
-					.getValue(frameTuple.getFieldData(splitAxis + sortOrder),
-							frameTuple.getFieldStart(splitAxis + sortOrder));
-			tupleEntries1.add(i, key);
-		}
-		double key = cmp.getValueProviders()[splitAxis + sortOrder].getValue(
-				tuple.getFieldData(splitAxis + sortOrder),
-				tuple.getFieldStart(splitAxis + sortOrder));
-		tupleEntries1.add(-1, key);
-		tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
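+        // split axis chosen; now sort all entries (plus the new tuple, index -1) along that
+        // axis and pick the split point with the least overlap, using combined area to break ties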
+        for (int i = 0; i < getTupleCount(); ++i) {
+            frameTuple.resetByTupleIndex(this, i);
+            double key = keyValueProviders[splitAxis + sortOrder].getValue(
+                    frameTuple.getFieldData(splitAxis + sortOrder), frameTuple.getFieldStart(splitAxis + sortOrder));
+            tupleEntries1.add(i, key);
+        }
+        double key = keyValueProviders[splitAxis + sortOrder].getValue(tuple.getFieldData(splitAxis + sortOrder),
+                tuple.getFieldStart(splitAxis + sortOrder));
+        tupleEntries1.add(-1, key);
+        tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
 
-		double minArea = Double.MAX_VALUE;
-		double minOverlap = Double.MAX_VALUE;
-		int splitPoint = 0;
-		for (int i = 1; i <= splitDistribution; ++i) {
-			int d = m - 1 + i;
+        double minArea = Double.MAX_VALUE;
+        double minOverlap = Double.MAX_VALUE;
+        int splitPoint = 0;
+        for (int i = 1; i <= splitDistribution; ++i) {
+            int d = m - 1 + i;
 
-			generateDist(tuple, tupleEntries1, rec[0], 0, d, cmp);
-			generateDist(tuple, tupleEntries1, rec[2], d, getTupleCount() + 1,
-					cmp);
+            generateDist(tuple, tupleEntries1, rec[0], 0, d);
+            generateDist(tuple, tupleEntries1, rec[2], d, getTupleCount() + 1);
 
-			double overlap = rec[0].overlappedArea(rec[2]);
-			if (overlap < minOverlap) {
-				splitPoint = d;
-				minOverlap = overlap;
-				minArea = rec[0].area() + rec[2].area();
-			} else if (overlap == minOverlap) {
-				double area = rec[0].area() + rec[2].area();
-				if (area < minArea) {
-					splitPoint = d;
-					minArea = area;
-				}
-			}
-		}
-		int startIndex, endIndex;
-		if (splitPoint < (getTupleCount() + 1) / 2) {
-			startIndex = 0;
-			endIndex = splitPoint;
-		} else {
-			startIndex = splitPoint;
-			endIndex = (getTupleCount() + 1);
-		}
-		boolean tupleInserted = false;
-		int totalBytes = 0, numOfDeletedTuples = 0;
-		for (int i = startIndex; i < endIndex; i++) {
-			if (tupleEntries1.get(i).getTupleIndex() != -1) {
-				frameTuple.resetByTupleIndex(this, tupleEntries1.get(i)
-						.getTupleIndex());
-				rightFrame.insert(frameTuple, cmp, -1);
-				((UnorderedSlotManager) slotManager).modifySlot(slotManager
-						.getSlotOff(tupleEntries1.get(i).getTupleIndex()), -1);
-				totalBytes += tupleWriter.bytesRequired(frameTuple)
-						+ childPtrSize;
-				numOfDeletedTuples++;
-			} else {
-				rightFrame.insert(tuple, cmp, -1);
-				tupleInserted = true;
-			}
-		}
+            double overlap = rec[0].overlappedArea(rec[2]);
+            if (overlap < minOverlap) {
+                splitPoint = d;
+                minOverlap = overlap;
+                minArea = rec[0].area() + rec[2].area();
+            } else if (overlap == minOverlap) {
+                double area = rec[0].area() + rec[2].area();
+                if (area < minArea) {
+                    splitPoint = d;
+                    minArea = area;
+                }
+            }
+        }
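+        // move the smaller half of the chosen distribution to the right frame; the entry
+        // with tupleIndex == -1 stands for the tuple being inserted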
+        int startIndex, endIndex;
+        if (splitPoint < (getTupleCount() + 1) / 2) {
+            startIndex = 0;
+            endIndex = splitPoint;
+        } else {
+            startIndex = splitPoint;
+            endIndex = (getTupleCount() + 1);
+        }
+        boolean tupleInserted = false;
+        int totalBytes = 0, numOfDeletedTuples = 0;
+        for (int i = startIndex; i < endIndex; i++) {
+            if (tupleEntries1.get(i).getTupleIndex() != -1) {
+                frameTuple.resetByTupleIndex(this, tupleEntries1.get(i).getTupleIndex());
+                rightFrame.insert(frameTuple, -1);
+                ((UnorderedSlotManager) slotManager).modifySlot(
+                        slotManager.getSlotOff(tupleEntries1.get(i).getTupleIndex()), -1);
+                totalBytes += tupleWriter.bytesRequired(frameTuple) + childPtrSize;
+                numOfDeletedTuples++;
+            } else {
+                rightFrame.insert(tuple, -1);
+                tupleInserted = true;
+            }
+        }
 
-		((UnorderedSlotManager) slotManager).deleteEmptySlots();
+        ((UnorderedSlotManager) slotManager).deleteEmptySlots();
 
-		// maintain space information
-		buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff)
-				+ totalBytes + (slotManager.getSlotSize() * numOfDeletedTuples));
+        // maintain space information
+        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + totalBytes
+                + (slotManager.getSlotSize() * numOfDeletedTuples));
 
-		// compact both pages
-		rightFrame.compact(cmp);
-		compact(cmp);
+        // compact both pages
+        rightFrame.compact();
+        compact();
 
-		if (!tupleInserted) {
-			insert(tuple, cmp, -1);
-		}
+        if (!tupleInserted) {
+            insert(tuple, -1);
+        }
 
-		int tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
-		frameTuple.resetByTupleOffset(buf, tupleOff);
-		int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0,
-				cmp.getKeyFieldCount());
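+        // size the split key from an existing tuple, then recompute each page's MBR and
+        // write it into the left/right split key buffers handed back to the parent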
+        int tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
+        frameTuple.resetByTupleOffset(buf, tupleOff);
+        int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, keyValueProviders.length);
 
-		splitKey.initData(splitKeySize);
-		this.adjustMBR(tuples, cmp);
-		rTreeTupleWriterLeftFrame.writeTupleFields(tuples, 0,
-				rTreeSplitKey.getLeftPageBuffer(), 0);
-		rTreeSplitKey.getLeftTuple().resetByTupleOffset(
-				rTreeSplitKey.getLeftPageBuffer(), 0);
+        splitKey.initData(splitKeySize);
+        this.adjustMBR(tuples);
+        rTreeTupleWriterLeftFrame.writeTupleFields(tuples, 0, rTreeSplitKey.getLeftPageBuffer(), 0);
+        rTreeSplitKey.getLeftTuple().resetByTupleOffset(rTreeSplitKey.getLeftPageBuffer(), 0);
 
-		((IRTreeFrame) rightFrame).adjustMBR(
-				((RTreeNSMFrame) rightFrame).getTuples(), cmp);
-		rTreeTupleWriterRightFrame.writeTupleFields(
-				((RTreeNSMFrame) rightFrame).getTuples(), 0,
-				rTreeSplitKey.getRightPageBuffer(), 0);
-		rTreeSplitKey.getRightTuple().resetByTupleOffset(
-				rTreeSplitKey.getRightPageBuffer(), 0);
+        ((IRTreeFrame) rightFrame).adjustMBR(((RTreeNSMFrame) rightFrame).getTuples());
+        rTreeTupleWriterRightFrame.writeTupleFields(((RTreeNSMFrame) rightFrame).getTuples(), 0,
+                rTreeSplitKey.getRightPageBuffer(), 0);
+        rTreeSplitKey.getRightTuple().resetByTupleOffset(rTreeSplitKey.getRightPageBuffer(), 0);
 
-		tupleEntries1.clear();
-		tupleEntries2.clear();
-		return 0;
-	}
+        tupleEntries1.clear();
+        tupleEntries2.clear();
+    }
 
-	private int getChildPointerOff(ITupleReference tuple, MultiComparator cmp) {
-		return tuple.getFieldStart(cmp.getKeyFieldCount() - 1)
-				+ tuple.getFieldLength(cmp.getKeyFieldCount() - 1);
-	}
+    private int getChildPointerOff(ITupleReference tuple) {
+        return tuple.getFieldStart(tuple.getFieldCount() - 1) + tuple.getFieldLength(tuple.getFieldCount() - 1);
+    }
 
-	@Override
-	public void insert(ITupleReference tuple, MultiComparator cmp,
-			int tupleIndex) throws Exception {
-		frameTuple.setFieldCount(cmp.getKeyFieldCount());
-		slotManager.insertSlot(-1, buf.getInt(freeSpaceOff));
-		int freeSpace = buf.getInt(freeSpaceOff);
-		int bytesWritten = tupleWriter.writeTupleFields(tuple, 0,
-				cmp.getKeyFieldCount(), buf, freeSpace);
-		System.arraycopy(tuple.getFieldData(cmp.getKeyFieldCount() - 1),
-				getChildPointerOff(tuple, cmp), buf.array(), freeSpace
-						+ bytesWritten, childPtrSize);
-		int tupleSize = bytesWritten + childPtrSize;
+    @Override
+    public void insert(ITupleReference tuple, int tupleIndex) {
+        frameTuple.setFieldCount(tuple.getFieldCount());
+        slotManager.insertSlot(-1, buf.getInt(freeSpaceOff));
+        int freeSpace = buf.getInt(freeSpaceOff);
+        int bytesWritten = tupleWriter.writeTupleFields(tuple, 0, tuple.getFieldCount(), buf, freeSpace);
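+        // the tuple writer emits the key fields; the raw child pointer is copied in right after them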
+        System.arraycopy(tuple.getFieldData(tuple.getFieldCount() - 1), getChildPointerOff(tuple), buf.array(),
+                freeSpace + bytesWritten, childPtrSize);
+        int tupleSize = bytesWritten + childPtrSize;
 
-		buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
-		buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + tupleSize);
-		buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - tupleSize
-				- slotManager.getSlotSize());
+        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
+        buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + tupleSize);
+        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - tupleSize - slotManager.getSlotSize());
 
-	}
+    }
 
-	@Override
-	public void delete(int tupleIndex, MultiComparator cmp) throws Exception {
-		frameTuple.setFieldCount(cmp.getKeyFieldCount());
-		int slotOff = slotManager.getSlotOff(tupleIndex);
+    @Override
+    public void delete(int tupleIndex, MultiComparator cmp) {
+        frameTuple.setFieldCount(cmp.getKeyFieldCount());
+        int slotOff = slotManager.getSlotOff(tupleIndex);
 
-		int tupleOff = slotManager.getTupleOff(slotOff);
-		frameTuple.resetByTupleOffset(buf, tupleOff);
-		int tupleSize = tupleWriter.bytesRequired(frameTuple);
+        int tupleOff = slotManager.getTupleOff(slotOff);
+        frameTuple.resetByTupleOffset(buf, tupleOff);
+        int tupleSize = tupleWriter.bytesRequired(frameTuple);
 
-		// perform deletion (we just do a memcpy to overwrite the slot)
-		int slotStartOff = slotManager.getSlotEndOff();
-		int length = slotOff - slotStartOff;
-		System.arraycopy(buf.array(), slotStartOff, buf.array(), slotStartOff
-				+ slotManager.getSlotSize(), length);
+        // perform deletion (we just do a memcpy to overwrite the slot)
+        int slotStartOff = slotManager.getSlotEndOff();
+        int length = slotOff - slotStartOff;
+        System.arraycopy(buf.array(), slotStartOff, buf.array(), slotStartOff + slotManager.getSlotSize(), length);
 
-		// maintain space information
-		buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
-		buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + tupleSize
-				+ childPtrSize + slotManager.getSlotSize());
-	}
+        // maintain space information
+        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
+        buf.putInt(totalFreeSpaceOff,
+                buf.getInt(totalFreeSpaceOff) + tupleSize + childPtrSize + slotManager.getSlotSize());
+    }
 
-	@Override
-	public boolean recomputeMBR(ITupleReference tuple, int tupleIndex,
-			MultiComparator cmp) {
-		frameTuple.setFieldCount(cmp.getKeyFieldCount());
-		frameTuple.resetByTupleIndex(this, tupleIndex);
+    @Override
+    public boolean recomputeMBR(ITupleReference tuple, int tupleIndex, MultiComparator cmp) {
+        frameTuple.setFieldCount(cmp.getKeyFieldCount());
+        frameTuple.resetByTupleIndex(this, tupleIndex);
 
-		int maxFieldPos = cmp.getKeyFieldCount() / 2;
-		for (int i = 0; i < maxFieldPos; i++) {
-			int j = maxFieldPos + i;
-			int c = cmp.getComparators()[i].compare(frameTuple.getFieldData(i),
-					frameTuple.getFieldStart(i), frameTuple.getFieldLength(i),
-					tuple.getFieldData(i), tuple.getFieldStart(i),
-					tuple.getFieldLength(i));
-			if (c != 0) {
-				return true;
-			}
-			c = cmp.getComparators()[j].compare(frameTuple.getFieldData(j),
-					frameTuple.getFieldStart(j), frameTuple.getFieldLength(j),
-					tuple.getFieldData(j), tuple.getFieldStart(j),
-					tuple.getFieldLength(j));
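+        // report true when any bound of the stored entry differs from the given tuple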
+        int maxFieldPos = cmp.getKeyFieldCount() / 2;
+        for (int i = 0; i < maxFieldPos; i++) {
+            int j = maxFieldPos + i;
+            int c = cmp.getComparators()[i].compare(frameTuple.getFieldData(i), frameTuple.getFieldStart(i),
+                    frameTuple.getFieldLength(i), tuple.getFieldData(i), tuple.getFieldStart(i),
+                    tuple.getFieldLength(i));
+            if (c != 0) {
+                return true;
+            }
+            c = cmp.getComparators()[j].compare(frameTuple.getFieldData(j), frameTuple.getFieldStart(j),
+                    frameTuple.getFieldLength(j), tuple.getFieldData(j), tuple.getFieldStart(j),
+                    tuple.getFieldLength(j));
 
-			if (c != 0) {
-				return true;
-			}
-		}
-		return false;
-	}
+            if (c != 0) {
+                return true;
+            }
+        }
+        return false;
+    }
 
-	private double overlappedArea(ITupleReference tuple1,
-			ITupleReference tupleToBeInserted, ITupleReference tuple2,
-			MultiComparator cmp) {
-		double area = 1.0;
-		double f1, f2;
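+    // area of the intersection between tuple1 (optionally enlarged by tupleToBeInserted) and
+    // tuple2; returns 0.0 as soon as the two rectangles are disjoint in any dimension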
+    private double overlappedArea(ITupleReference tuple1, ITupleReference tupleToBeInserted, ITupleReference tuple2,
+            MultiComparator cmp) {
+        double area = 1.0;
+        double f1, f2;
 
-		int maxFieldPos = cmp.getKeyFieldCount() / 2;
-		for (int i = 0; i < maxFieldPos; i++) {
-			int j = maxFieldPos + i;
-			double pHigh1, pLow1;
-			if (tupleToBeInserted != null) {
-				int c = cmp.getComparators()[i].compare(tuple1.getFieldData(i),
-						tuple1.getFieldStart(i), tuple1.getFieldLength(i),
-						tupleToBeInserted.getFieldData(i),
-						tupleToBeInserted.getFieldStart(i),
-						tupleToBeInserted.getFieldLength(i));
-				if (c < 0) {
-					pLow1 = cmp.getValueProviders()[i].getValue(
-							tuple1.getFieldData(i), tuple1.getFieldStart(i));
-				} else {
-					pLow1 = cmp.getValueProviders()[i].getValue(
-							tupleToBeInserted.getFieldData(i),
-							tupleToBeInserted.getFieldStart(i));
-				}
+        int maxFieldPos = cmp.getKeyFieldCount() / 2;
+        for (int i = 0; i < maxFieldPos; i++) {
+            int j = maxFieldPos + i;
+            double pHigh1, pLow1;
+            if (tupleToBeInserted != null) {
+                int c = cmp.getComparators()[i].compare(tuple1.getFieldData(i), tuple1.getFieldStart(i),
+                        tuple1.getFieldLength(i), tupleToBeInserted.getFieldData(i),
+                        tupleToBeInserted.getFieldStart(i), tupleToBeInserted.getFieldLength(i));
+                if (c < 0) {
+                    pLow1 = keyValueProviders[i].getValue(tuple1.getFieldData(i), tuple1.getFieldStart(i));
+                } else {
+                    pLow1 = keyValueProviders[i].getValue(tupleToBeInserted.getFieldData(i),
+                            tupleToBeInserted.getFieldStart(i));
+                }
 
-				c = cmp.getComparators()[j].compare(tuple1.getFieldData(j),
-						tuple1.getFieldStart(j), tuple1.getFieldLength(j),
-						tupleToBeInserted.getFieldData(j),
-						tupleToBeInserted.getFieldStart(j),
-						tupleToBeInserted.getFieldLength(j));
-				if (c > 0) {
-					pHigh1 = cmp.getValueProviders()[j].getValue(
-							tuple1.getFieldData(j), tuple1.getFieldStart(j));
-				} else {
-					pHigh1 = cmp.getValueProviders()[j].getValue(
-							tupleToBeInserted.getFieldData(j),
-							tupleToBeInserted.getFieldStart(j));
-				}
-			} else {
-				pLow1 = cmp.getValueProviders()[i].getValue(
-						tuple1.getFieldData(i), tuple1.getFieldStart(i));
-				pHigh1 = cmp.getValueProviders()[j].getValue(
-						tuple1.getFieldData(j), tuple1.getFieldStart(j));
-			}
+                c = cmp.getComparators()[j].compare(tuple1.getFieldData(j), tuple1.getFieldStart(j),
+                        tuple1.getFieldLength(j), tupleToBeInserted.getFieldData(j),
+                        tupleToBeInserted.getFieldStart(j), tupleToBeInserted.getFieldLength(j));
+                if (c > 0) {
+                    pHigh1 = keyValueProviders[j].getValue(tuple1.getFieldData(j), tuple1.getFieldStart(j));
+                } else {
+                    pHigh1 = keyValueProviders[j].getValue(tupleToBeInserted.getFieldData(j),
+                            tupleToBeInserted.getFieldStart(j));
+                }
+            } else {
+                pLow1 = keyValueProviders[i].getValue(tuple1.getFieldData(i), tuple1.getFieldStart(i));
+                pHigh1 = keyValueProviders[j].getValue(tuple1.getFieldData(j), tuple1.getFieldStart(j));
+            }
 
-			double pLow2 = cmp.getValueProviders()[i].getValue(
-					tuple2.getFieldData(i), tuple2.getFieldStart(i));
-			double pHigh2 = cmp.getValueProviders()[j].getValue(
-					tuple2.getFieldData(j), tuple2.getFieldStart(j));
+            double pLow2 = keyValueProviders[i].getValue(tuple2.getFieldData(i), tuple2.getFieldStart(i));
+            double pHigh2 = keyValueProviders[j].getValue(tuple2.getFieldData(j), tuple2.getFieldStart(j));
 
-			if (pLow1 > pHigh2 || pHigh1 < pLow2) {
-				return 0.0;
-			}
+            if (pLow1 > pHigh2 || pHigh1 < pLow2) {
+                return 0.0;
+            }
 
-			f1 = Math.max(pLow1, pLow2);
-			f2 = Math.min(pHigh1, pHigh2);
-			area *= f2 - f1;
-		}
-		return area;
-	}
+            f1 = Math.max(pLow1, pLow2);
+            f2 = Math.min(pHigh1, pHigh2);
+            area *= f2 - f1;
+        }
+        return area;
+    }
 
-	private double enlargedArea(ITupleReference tuple,
-			ITupleReference tupleToBeInserted, MultiComparator cmp) {
-		double areaBeforeEnlarge = area(tuple, cmp);
-		double areaAfterEnlarge = 1.0;
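+    // extra area required to grow tuple's MBR so that it also covers tupleToBeInserted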
+    private double enlargedArea(ITupleReference tuple, ITupleReference tupleToBeInserted, MultiComparator cmp) {
+        double areaBeforeEnlarge = area(tuple, cmp);
+        double areaAfterEnlarge = 1.0;
 
-		int maxFieldPos = cmp.getKeyFieldCount() / 2;
-		for (int i = 0; i < maxFieldPos; i++) {
-			int j = maxFieldPos + i;
-			double pHigh, pLow;
-			int c = cmp.getComparators()[i].compare(tuple.getFieldData(i),
-					tuple.getFieldStart(i), tuple.getFieldLength(i),
-					tupleToBeInserted.getFieldData(i),
-					tupleToBeInserted.getFieldStart(i),
-					tupleToBeInserted.getFieldLength(i));
-			if (c < 0) {
-				pLow = cmp.getValueProviders()[i].getValue(
-						tuple.getFieldData(i), tuple.getFieldStart(i));
-			} else {
-				pLow = cmp.getValueProviders()[i].getValue(
-						tupleToBeInserted.getFieldData(i),
-						tupleToBeInserted.getFieldStart(i));
-			}
+        int maxFieldPos = cmp.getKeyFieldCount() / 2;
+        for (int i = 0; i < maxFieldPos; i++) {
+            int j = maxFieldPos + i;
+            double pHigh, pLow;
+            int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
+                    tuple.getFieldLength(i), tupleToBeInserted.getFieldData(i), tupleToBeInserted.getFieldStart(i),
+                    tupleToBeInserted.getFieldLength(i));
+            if (c < 0) {
+                pLow = keyValueProviders[i].getValue(tuple.getFieldData(i), tuple.getFieldStart(i));
+            } else {
+                pLow = keyValueProviders[i].getValue(tupleToBeInserted.getFieldData(i),
+                        tupleToBeInserted.getFieldStart(i));
+            }
 
-			c = cmp.getComparators()[j].compare(tuple.getFieldData(j),
-					tuple.getFieldStart(j), tuple.getFieldLength(j),
-					tupleToBeInserted.getFieldData(j),
-					tupleToBeInserted.getFieldStart(j),
-					tupleToBeInserted.getFieldLength(j));
-			if (c > 0) {
-				pHigh = cmp.getValueProviders()[j].getValue(
-						tuple.getFieldData(j), tuple.getFieldStart(j));
-			} else {
-				pHigh = cmp.getValueProviders()[j].getValue(
-						tupleToBeInserted.getFieldData(j),
-						tupleToBeInserted.getFieldStart(j));
-			}
-			areaAfterEnlarge *= pHigh - pLow;
-		}
-		return areaAfterEnlarge - areaBeforeEnlarge;
-	}
+            c = cmp.getComparators()[j].compare(tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j),
+                    tupleToBeInserted.getFieldData(j), tupleToBeInserted.getFieldStart(j),
+                    tupleToBeInserted.getFieldLength(j));
+            if (c > 0) {
+                pHigh = keyValueProviders[j].getValue(tuple.getFieldData(j), tuple.getFieldStart(j));
+            } else {
+                pHigh = keyValueProviders[j].getValue(tupleToBeInserted.getFieldData(j),
+                        tupleToBeInserted.getFieldStart(j));
+            }
+            areaAfterEnlarge *= pHigh - pLow;
+        }
+        return areaAfterEnlarge - areaBeforeEnlarge;
+    }
 
-	private double area(ITupleReference tuple, MultiComparator cmp) {
-		double area = 1.0;
-		int maxFieldPos = cmp.getKeyFieldCount() / 2;
-		for (int i = 0; i < maxFieldPos; i++) {
-			int j = maxFieldPos + i;
-			area *= cmp.getValueProviders()[j].getValue(tuple.getFieldData(j),
-					tuple.getFieldStart(j))
-					- cmp.getValueProviders()[i].getValue(
-							tuple.getFieldData(i), tuple.getFieldStart(i));
-		}
-		return area;
-	}
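+    // hyper-volume of the MBR: product of (upper bound - lower bound) over all dimensions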
+    private double area(ITupleReference tuple, MultiComparator cmp) {
+        double area = 1.0;
+        int maxFieldPos = cmp.getKeyFieldCount() / 2;
+        for (int i = 0; i < maxFieldPos; i++) {
+            int j = maxFieldPos + i;
+            area *= keyValueProviders[j].getValue(tuple.getFieldData(j), tuple.getFieldStart(j))
+                    - keyValueProviders[i].getValue(tuple.getFieldData(i), tuple.getFieldStart(i));
+        }
+        return area;
+    }
 
-	@Override
-	public void enlarge(ITupleReference tuple, MultiComparator cmp) {
-		int maxFieldPos = cmp.getKeyFieldCount() / 2;
-		for (int i = 0; i < maxFieldPos; i++) {
-			int j = maxFieldPos + i;
-			int c = cmp.getComparators()[i].compare(frameTuple.getFieldData(i),
-					frameTuple.getFieldStart(i), frameTuple.getFieldLength(i),
-					tuple.getFieldData(i), tuple.getFieldStart(i),
-					tuple.getFieldLength(i));
-			if (c > 0) {
-				System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i),
-						frameTuple.getFieldData(i),
-						frameTuple.getFieldStart(i), tuple.getFieldLength(i));
-			}
-			c = cmp.getComparators()[j].compare(frameTuple.getFieldData(j),
-					frameTuple.getFieldStart(j), frameTuple.getFieldLength(j),
-					tuple.getFieldData(j), tuple.getFieldStart(j),
-					tuple.getFieldLength(j));
-			if (c < 0) {
-				System.arraycopy(tuple.getFieldData(j), tuple.getFieldStart(j),
-						frameTuple.getFieldData(j),
-						frameTuple.getFieldStart(j), tuple.getFieldLength(j));
-			}
-		}
-	}
+    @Override
+    public void enlarge(ITupleReference tuple, MultiComparator cmp) {
+        int maxFieldPos = cmp.getKeyFieldCount() / 2;
+        for (int i = 0; i < maxFieldPos; i++) {
+            int j = maxFieldPos + i;
+            int c = cmp.getComparators()[i].compare(frameTuple.getFieldData(i), frameTuple.getFieldStart(i),
+                    frameTuple.getFieldLength(i), tuple.getFieldData(i), tuple.getFieldStart(i),
+                    tuple.getFieldLength(i));
+            if (c > 0) {
+                System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), frameTuple.getFieldData(i),
+                        frameTuple.getFieldStart(i), tuple.getFieldLength(i));
+            }
+            c = cmp.getComparators()[j].compare(frameTuple.getFieldData(j), frameTuple.getFieldStart(j),
+                    frameTuple.getFieldLength(j), tuple.getFieldData(j), tuple.getFieldStart(j),
+                    tuple.getFieldLength(j));
+            if (c < 0) {
+                System.arraycopy(tuple.getFieldData(j), tuple.getFieldStart(j), frameTuple.getFieldData(j),
+                        frameTuple.getFieldStart(j), tuple.getFieldLength(j));
+            }
+        }
+    }
 
-	@Override
-	public void adjustMBR(ITreeIndexTupleReference[] tuples, MultiComparator cmp) {
-		for (int i = 0; i < tuples.length; i++) {
-			tuples[i].setFieldCount(cmp.getKeyFieldCount());
-			tuples[i].resetByTupleIndex(this, 0);
-		}
+    @Override
+    public void adjustMBR(ITreeIndexTupleReference[] tuples) {
+        for (int i = 0; i < tuples.length; i++) {
+            tuples[i].setFieldCount(keyValueProviders.length);
+            tuples[i].resetByTupleIndex(this, 0);
+        }
 
-		adjustMBRImpl(tuples, cmp);
-	}
-}
\ No newline at end of file
+        adjustMBRImpl(tuples);
+    }
+}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java
index 100b4d8..943a179 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java
@@ -15,29 +15,37 @@
 
 package edu.uci.ics.hyracks.storage.am.rtree.frames;
 
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
 
 public class RTreeNSMInteriorFrameFactory implements ITreeIndexFrameFactory {
 
-	private static final long serialVersionUID = 1L;
-	private ITreeIndexTupleWriterFactory tupleWriterFactory;
-	private int keyFieldCount;
+    private static final long serialVersionUID = 1L;
+    private final ITreeIndexTupleWriterFactory tupleWriterFactory;
+    private final IPrimitiveValueProviderFactory[] keyValueProviderFactories;
 
-	public RTreeNSMInteriorFrameFactory(
-			ITreeIndexTupleWriterFactory tupleWriterFactory, int keyFieldCount) {
-		this.tupleWriterFactory = tupleWriterFactory;
-		if (keyFieldCount % 2 != 0) {
-			throw new IllegalArgumentException(
-					"The key has different number of dimensions.");
-		}
-		this.keyFieldCount = keyFieldCount;
-	}
+    public RTreeNSMInteriorFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory,
+            IPrimitiveValueProviderFactory[] keyValueProviderFactories) {
+        this.tupleWriterFactory = tupleWriterFactory;
+        if (keyValueProviderFactories.length % 2 != 0) {
+            throw new IllegalArgumentException("The key has different number of dimensions.");
+        }
+        this.keyValueProviderFactories = keyValueProviderFactories;
+    }
+
+    @Override
+    public IRTreeInteriorFrame createFrame() {
+        IPrimitiveValueProvider[] keyValueProviders = new IPrimitiveValueProvider[keyValueProviderFactories.length];
+        for (int i = 0; i < keyValueProviders.length; i++) {
+            keyValueProviders[i] = keyValueProviderFactories[i].createPrimitiveValueProvider();
+        }
+        return new RTreeNSMInteriorFrame(tupleWriterFactory.createTupleWriter(), keyValueProviders);
+    }
 
 	@Override
-	public IRTreeInteriorFrame createFrame() {
-		return new RTreeNSMInteriorFrame(
-				tupleWriterFactory.createTupleWriter(), keyFieldCount);
+	public ITreeIndexTupleWriterFactory getTupleWriterFactory() {
+		return tupleWriterFactory;
 	}
-}
\ No newline at end of file
+}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java
index fa2ad32..b2ae5fc 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java
@@ -16,10 +16,12 @@
 package edu.uci.ics.hyracks.storage.am.rtree.frames;
 
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
 import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
 import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeFrame;
 import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
@@ -30,263 +32,225 @@
 
 public class RTreeNSMLeafFrame extends RTreeNSMFrame implements IRTreeLeafFrame {
 
-	public RTreeNSMLeafFrame(ITreeIndexTupleWriter tupleWriter,
-			int keyFieldCount) {
-		super(tupleWriter, keyFieldCount);
-	}
+    public RTreeNSMLeafFrame(ITreeIndexTupleWriter tupleWriter, IPrimitiveValueProvider[] keyValueProviders) {
+        super(tupleWriter, keyValueProviders);
+    }
 
-	@Override
-	public int findTupleIndex(ITupleReference tuple, MultiComparator cmp) {
-		frameTuple.setFieldCount(cmp.getFieldCount());
-		return slotManager.findTupleIndex(tuple, frameTuple, cmp, null, null);
-	}
+    @Override
+    public int findTupleIndex(ITupleReference tuple, MultiComparator cmp) {
+        return slotManager.findTupleIndex(tuple, frameTuple, cmp, null, null);
+    }
 
-	@Override
-	public boolean intersect(ITupleReference tuple, int tupleIndex,
-			MultiComparator cmp) {
-		frameTuple.setFieldCount(cmp.getFieldCount());
-		frameTuple.resetByTupleIndex(this, tupleIndex);
-		int maxFieldPos = cmp.getKeyFieldCount() / 2;
-		for (int i = 0; i < maxFieldPos; i++) {
-			int j = maxFieldPos + i;
-			int c = cmp.getComparators()[i].compare(tuple.getFieldData(i),
-					tuple.getFieldStart(i), tuple.getFieldLength(i),
-					frameTuple.getFieldData(j), frameTuple.getFieldStart(j),
-					frameTuple.getFieldLength(j));
-			if (c > 0) {
-				return false;
-			}
-			c = cmp.getComparators()[i].compare(tuple.getFieldData(j),
-					tuple.getFieldStart(j), tuple.getFieldLength(j),
-					frameTuple.getFieldData(i), frameTuple.getFieldStart(i),
-					frameTuple.getFieldLength(i));
+    @Override
+    public boolean intersect(ITupleReference tuple, int tupleIndex, MultiComparator cmp) {
+        frameTuple.resetByTupleIndex(this, tupleIndex);
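+        // MBRs intersect iff, in every dimension, the tuple's lower bound does not exceed
+        // the entry's upper bound and vice versa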
+        int maxFieldPos = cmp.getKeyFieldCount() / 2;
+        for (int i = 0; i < maxFieldPos; i++) {
+            int j = maxFieldPos + i;
+            int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
+                    tuple.getFieldLength(i), frameTuple.getFieldData(j), frameTuple.getFieldStart(j),
+                    frameTuple.getFieldLength(j));
+            if (c > 0) {
+                return false;
+            }
+            c = cmp.getComparators()[i].compare(tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j),
+                    frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
 
-			if (c < 0) {
-				return false;
-			}
-		}
-		return true;
-	}
+            if (c < 0) {
+                return false;
+            }
+        }
+        return true;
+    }
 
-	@Override
-	public int split(ITreeIndexFrame rightFrame, ITupleReference tuple,
-			MultiComparator cmp, ISplitKey splitKey) throws Exception {
+    @Override
+    public void split(ITreeIndexFrame rightFrame, ITupleReference tuple, ISplitKey splitKey) throws TreeIndexException {
 
-		rightFrame.setPageTupleFieldCount(cmp.getFieldCount());
-		frameTuple.setFieldCount(cmp.getFieldCount());
+        RTreeSplitKey rTreeSplitKey = ((RTreeSplitKey) splitKey);
+        RTreeTypeAwareTupleWriter rTreeTupleWriterLeftFrame = ((RTreeTypeAwareTupleWriter) tupleWriter);
+        RTreeTypeAwareTupleWriter rTreeTupleWriterRightFrame = ((RTreeTypeAwareTupleWriter) rightFrame.getTupleWriter());
 
-		RTreeSplitKey rTreeSplitKey = ((RTreeSplitKey) splitKey);
-		RTreeTypeAwareTupleWriter rTreeTupleWriterLeftFrame = ((RTreeTypeAwareTupleWriter) tupleWriter);
-		RTreeTypeAwareTupleWriter rTreeTupleWriterRightFrame = ((RTreeTypeAwareTupleWriter) rightFrame
-				.getTupleWriter());
+        // calculations are based on the R*-tree paper
+        int m = (int) Math.floor((getTupleCount() + 1) * splitFactor);
+        int splitDistribution = getTupleCount() - (2 * m) + 2;
 
-		// calculations are based on the R*-tree paper
-		int m = (int) Math.floor((getTupleCount() + 1) * splitFactor);
-		int splitDistribution = getTupleCount() - (2 * m) + 2;
+        // to calculate the minimum margin in order to pick the split axis
+        double minMargin = Double.MAX_VALUE;
+        int splitAxis = 0, sortOrder = 0;
 
-		// to calculate the minimum margin in order to pick the split axis
-		double minMargin = Double.MAX_VALUE;
-		int splitAxis = 0, sortOrder = 0;
+        int maxFieldPos = keyValueProviders.length / 2;
+        for (int i = 0; i < maxFieldPos; i++) {
+            int j = maxFieldPos + i;
+            for (int k = 0; k < getTupleCount(); ++k) {
 
-		int maxFieldPos = cmp.getKeyFieldCount() / 2;
-		for (int i = 0; i < maxFieldPos; i++) {
-			int j = maxFieldPos + i;
-			for (int k = 0; k < getTupleCount(); ++k) {
+                frameTuple.resetByTupleIndex(this, k);
 
-				frameTuple.resetByTupleIndex(this, k);
+                double LowerKey = keyValueProviders[i].getValue(frameTuple.getFieldData(i),
+                        frameTuple.getFieldStart(i));
+                double UpperKey = keyValueProviders[j].getValue(frameTuple.getFieldData(j),
+                        frameTuple.getFieldStart(j));
 
-				double LowerKey = cmp.getValueProviders()[i]
-						.getValue(frameTuple.getFieldData(i),
-								frameTuple.getFieldStart(i));
-				double UpperKey = cmp.getValueProviders()[j]
-						.getValue(frameTuple.getFieldData(j),
-								frameTuple.getFieldStart(j));
+                tupleEntries1.add(k, LowerKey);
+                tupleEntries2.add(k, UpperKey);
+            }
+            double LowerKey = keyValueProviders[i].getValue(tuple.getFieldData(i), tuple.getFieldStart(i));
+            double UpperKey = keyValueProviders[j].getValue(tuple.getFieldData(j), tuple.getFieldStart(j));
 
-				tupleEntries1.add(k, LowerKey);
-				tupleEntries2.add(k, UpperKey);
-			}
-			double LowerKey = cmp.getValueProviders()[i].getValue(
-					tuple.getFieldData(i), tuple.getFieldStart(i));
-			double UpperKey = cmp.getValueProviders()[j].getValue(
-					tuple.getFieldData(j), tuple.getFieldStart(j));
+            tupleEntries1.add(-1, LowerKey);
+            tupleEntries2.add(-1, UpperKey);
 
-			tupleEntries1.add(-1, LowerKey);
-			tupleEntries2.add(-1, UpperKey);
+            tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
+            tupleEntries2.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
 
-			tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
-			tupleEntries2.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
+            double lowerMargin = 0.0, upperMargin = 0.0;
+            // generate distribution
+            for (int k = 1; k <= splitDistribution; ++k) {
+                int d = m - 1 + k;
 
-			double lowerMargin = 0.0, upperMargin = 0.0;
-			// generate distribution
-			for (int k = 1; k <= splitDistribution; ++k) {
-				int d = m - 1 + k;
+                generateDist(tuple, tupleEntries1, rec[0], 0, d);
+                generateDist(tuple, tupleEntries2, rec[1], 0, d);
+                generateDist(tuple, tupleEntries1, rec[2], d, getTupleCount() + 1);
+                generateDist(tuple, tupleEntries2, rec[3], d, getTupleCount() + 1);
 
-				generateDist(tuple, tupleEntries1, rec[0], 0, d, cmp);
-				generateDist(tuple, tupleEntries2, rec[1], 0, d, cmp);
-				generateDist(tuple, tupleEntries1, rec[2], d,
-						getTupleCount() + 1, cmp);
-				generateDist(tuple, tupleEntries2, rec[3], d,
-						getTupleCount() + 1, cmp);
+                // calculate the margin of the distributions
+                lowerMargin += rec[0].margin() + rec[2].margin();
+                upperMargin += rec[1].margin() + rec[3].margin();
+            }
+            double margin = Math.min(lowerMargin, upperMargin);
 
-				// calculate the margin of the distributions
-				lowerMargin += rec[0].margin() + rec[2].margin();
-				upperMargin += rec[1].margin() + rec[3].margin();
-			}
-			double margin = Math.min(lowerMargin, upperMargin);
+            // store minimum margin as split axis
+            if (margin < minMargin) {
+                minMargin = margin;
+                splitAxis = i;
+                sortOrder = (lowerMargin < upperMargin) ? 0 : 2;
+            }
 
-			// store minimum margin as split axis
-			if (margin < minMargin) {
-				minMargin = margin;
-				splitAxis = i;
-				sortOrder = (lowerMargin < upperMargin) ? 0 : 2;
-			}
+            tupleEntries1.clear();
+            tupleEntries2.clear();
+        }
 
-			tupleEntries1.clear();
-			tupleEntries2.clear();
-		}
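+        // split axis chosen; sort along it and pick the split point with the least overlap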
+        for (int i = 0; i < getTupleCount(); ++i) {
+            frameTuple.resetByTupleIndex(this, i);
+            double key = keyValueProviders[splitAxis + sortOrder].getValue(
+                    frameTuple.getFieldData(splitAxis + sortOrder), frameTuple.getFieldStart(splitAxis + sortOrder));
+            tupleEntries1.add(i, key);
+        }
+        double key = keyValueProviders[splitAxis + sortOrder].getValue(tuple.getFieldData(splitAxis + sortOrder),
+                tuple.getFieldStart(splitAxis + sortOrder));
+        tupleEntries1.add(-1, key);
+        tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
 
-		for (int i = 0; i < getTupleCount(); ++i) {
-			frameTuple.resetByTupleIndex(this, i);
-			double key = cmp.getValueProviders()[splitAxis + sortOrder]
-					.getValue(frameTuple.getFieldData(splitAxis + sortOrder),
-							frameTuple.getFieldStart(splitAxis + sortOrder));
-			tupleEntries1.add(i, key);
-		}
-		double key = cmp.getValueProviders()[splitAxis + sortOrder].getValue(
-				tuple.getFieldData(splitAxis + sortOrder),
-				tuple.getFieldStart(splitAxis + sortOrder));
-		tupleEntries1.add(-1, key);
-		tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
+        double minArea = Double.MAX_VALUE;
+        double minOverlap = Double.MAX_VALUE;
+        int splitPoint = 0;
+        for (int i = 1; i <= splitDistribution; ++i) {
+            int d = m - 1 + i;
 
-		double minArea = Double.MAX_VALUE;
-		double minOverlap = Double.MAX_VALUE;
-		int splitPoint = 0;
-		for (int i = 1; i <= splitDistribution; ++i) {
-			int d = m - 1 + i;
+            generateDist(tuple, tupleEntries1, rec[0], 0, d);
+            generateDist(tuple, tupleEntries1, rec[2], d, getTupleCount() + 1);
 
-			generateDist(tuple, tupleEntries1, rec[0], 0, d, cmp);
-			generateDist(tuple, tupleEntries1, rec[2], d, getTupleCount() + 1,
-					cmp);
+            double overlap = rec[0].overlappedArea(rec[2]);
+            if (overlap < minOverlap) {
+                splitPoint = d;
+                minOverlap = overlap;
+                minArea = rec[0].area() + rec[2].area();
+            } else if (overlap == minOverlap) {
+                double area = rec[0].area() + rec[2].area();
+                if (area < minArea) {
+                    splitPoint = d;
+                    minArea = area;
+                }
+            }
+        }
+        int startIndex, endIndex;
+        if (splitPoint < (getTupleCount() + 1) / 2) {
+            startIndex = 0;
+            endIndex = splitPoint;
+        } else {
+            startIndex = splitPoint;
+            endIndex = (getTupleCount() + 1);
+        }
+        boolean tupleInserted = false;
+        int totalBytes = 0, numOfDeletedTuples = 0;
+        for (int i = startIndex; i < endIndex; i++) {
+            if (tupleEntries1.get(i).getTupleIndex() != -1) {
+                frameTuple.resetByTupleIndex(this, tupleEntries1.get(i).getTupleIndex());
+                rightFrame.insert(frameTuple, -1);
+                ((UnorderedSlotManager) slotManager).modifySlot(
+                        slotManager.getSlotOff(tupleEntries1.get(i).getTupleIndex()), -1);
+                totalBytes += tupleWriter.bytesRequired(frameTuple);
+                numOfDeletedTuples++;
+            } else {
+                rightFrame.insert(tuple, -1);
+                tupleInserted = true;
+            }
+        }
 
-			double overlap = rec[0].overlappedArea(rec[2]);
-			if (overlap < minOverlap) {
-				splitPoint = d;
-				minOverlap = overlap;
-				minArea = rec[0].area() + rec[2].area();
-			} else if (overlap == minOverlap) {
-				double area = rec[0].area() + rec[2].area();
-				if (area < minArea) {
-					splitPoint = d;
-					minArea = area;
-				}
-			}
-		}
-		int startIndex, endIndex;
-		if (splitPoint < (getTupleCount() + 1) / 2) {
-			startIndex = 0;
-			endIndex = splitPoint;
-		} else {
-			startIndex = splitPoint;
-			endIndex = (getTupleCount() + 1);
-		}
-		boolean tupleInserted = false;
-		int totalBytes = 0, numOfDeletedTuples = 0;
-		for (int i = startIndex; i < endIndex; i++) {
-			if (tupleEntries1.get(i).getTupleIndex() != -1) {
-				frameTuple.resetByTupleIndex(this, tupleEntries1.get(i)
-						.getTupleIndex());
-				rightFrame.insert(frameTuple, cmp, -1);
-				((UnorderedSlotManager) slotManager).modifySlot(slotManager
-						.getSlotOff(tupleEntries1.get(i).getTupleIndex()), -1);
-				totalBytes += tupleWriter.bytesRequired(frameTuple);
-				numOfDeletedTuples++;
-			} else {
-				rightFrame.insert(tuple, cmp, -1);
-				tupleInserted = true;
-			}
-		}
+        ((UnorderedSlotManager) slotManager).deleteEmptySlots();
 
-		((UnorderedSlotManager) slotManager).deleteEmptySlots();
+        // maintain space information
+        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + totalBytes
+                + (slotManager.getSlotSize() * numOfDeletedTuples));
 
-		// maintain space information
-		buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff)
-				+ totalBytes + (slotManager.getSlotSize() * numOfDeletedTuples));
+        // compact both pages
+        rightFrame.compact();
+        compact();
 
-		// compact both pages
-		rightFrame.compact(cmp);
-		compact(cmp);
+        if (!tupleInserted) {
+            insert(tuple, -1);
+        }
 
-		if (!tupleInserted) {
-			insert(tuple, cmp, -1);
-		}
+        int tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
+        frameTuple.resetByTupleOffset(buf, tupleOff);
+        int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, keyValueProviders.length);
 
-		int tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
-		frameTuple.resetByTupleOffset(buf, tupleOff);
-		int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0,
-				cmp.getKeyFieldCount());
+        splitKey.initData(splitKeySize);
+        this.adjustMBR(tuples);
+        rTreeTupleWriterLeftFrame.writeTupleFields(tuples, 0, rTreeSplitKey.getLeftPageBuffer(), 0);
+        rTreeSplitKey.getLeftTuple().resetByTupleOffset(rTreeSplitKey.getLeftPageBuffer(), 0);
 
-		splitKey.initData(splitKeySize);
-		this.adjustMBR(tuples, cmp);
-		rTreeTupleWriterLeftFrame.writeTupleFields(tuples, 0,
-				rTreeSplitKey.getLeftPageBuffer(), 0);
-		rTreeSplitKey.getLeftTuple().resetByTupleOffset(
-				rTreeSplitKey.getLeftPageBuffer(), 0);
+        ((IRTreeFrame) rightFrame).adjustMBR(((RTreeNSMFrame) rightFrame).getTuples());
+        rTreeTupleWriterRightFrame.writeTupleFields(((RTreeNSMFrame) rightFrame).getTuples(), 0,
+                rTreeSplitKey.getRightPageBuffer(), 0);
+        rTreeSplitKey.getRightTuple().resetByTupleOffset(rTreeSplitKey.getRightPageBuffer(), 0);
 
-		((IRTreeFrame) rightFrame).adjustMBR(
-				((RTreeNSMFrame) rightFrame).getTuples(), cmp);
-		rTreeTupleWriterRightFrame.writeTupleFields(
-				((RTreeNSMFrame) rightFrame).getTuples(), 0,
-				rTreeSplitKey.getRightPageBuffer(), 0);
-		rTreeSplitKey.getRightTuple().resetByTupleOffset(
-				rTreeSplitKey.getRightPageBuffer(), 0);
+        tupleEntries1.clear();
+        tupleEntries2.clear();
+    }
 
-		tupleEntries1.clear();
-		tupleEntries2.clear();
-		return 0;
-	}
+    @Override
+    public void insert(ITupleReference tuple, int tupleIndex) {
+        slotManager.insertSlot(-1, buf.getInt(freeSpaceOff));
+        int bytesWritten = tupleWriter.writeTuple(tuple, buf.array(), buf.getInt(freeSpaceOff));
 
-	@Override
-	public void insert(ITupleReference tuple, MultiComparator cmp,
-			int tupleIndex) throws Exception {
-		frameTuple.setFieldCount(cmp.getFieldCount());
-		slotManager.insertSlot(-1, buf.getInt(freeSpaceOff));
-		int bytesWritten = tupleWriter.writeTuple(tuple, buf,
-				buf.getInt(freeSpaceOff));
+        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
+        buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
+        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
+    }
 
-		buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
-		buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
-		buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff)
-				- bytesWritten - slotManager.getSlotSize());
-	}
+    @Override
+    public void delete(int tupleIndex, MultiComparator cmp) {
+        int slotOff = slotManager.getSlotOff(tupleIndex);
 
-	@Override
-	public void delete(int tupleIndex, MultiComparator cmp) throws Exception {
-		frameTuple.setFieldCount(cmp.getFieldCount());
-		int slotOff = slotManager.getSlotOff(tupleIndex);
+        int tupleOff = slotManager.getTupleOff(slotOff);
+        frameTuple.resetByTupleOffset(buf, tupleOff);
+        int tupleSize = tupleWriter.bytesRequired(frameTuple);
 
-		int tupleOff = slotManager.getTupleOff(slotOff);
-		frameTuple.resetByTupleOffset(buf, tupleOff);
-		int tupleSize = tupleWriter.bytesRequired(frameTuple);
+        // perform deletion (we just do a memcpy to overwrite the slot)
+        int slotStartOff = slotManager.getSlotEndOff();
+        int length = slotOff - slotStartOff;
+        System.arraycopy(buf.array(), slotStartOff, buf.array(), slotStartOff + slotManager.getSlotSize(), length);
 
-		// perform deletion (we just do a memcpy to overwrite the slot)
-		int slotStartOff = slotManager.getSlotEndOff();
-		int length = slotOff - slotStartOff;
-		System.arraycopy(buf.array(), slotStartOff, buf.array(), slotStartOff
-				+ slotManager.getSlotSize(), length);
+        // maintain space information
+        buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
+        buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + tupleSize + slotManager.getSlotSize());
+    }
 
-		// maintain space information
-		buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
-		buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + tupleSize
-				+ slotManager.getSlotSize());
-	}
-
-	@Override
-	public void adjustMBR(ITreeIndexTupleReference[] tuples, MultiComparator cmp) {
-		for (int i = 0; i < tuples.length; i++) {
-			tuples[i].setFieldCount(cmp.getFieldCount());
-			tuples[i].resetByTupleIndex(this, 0);
-		}
-
-		adjustMBRImpl(tuples, cmp);
-	}
-}
\ No newline at end of file
+    @Override
+    public void adjustMBR(ITreeIndexTupleReference[] tuples) {
+        for (int i = 0; i < tuples.length; i++) {
+            tuples[i].resetByTupleIndex(this, 0);
+        }
+        adjustMBRImpl(tuples);
+    }
+}
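
The split() rewrite above implements the R*-tree split heuristic: for every dimension the frame's entries (plus the tuple being inserted) are sorted once by lower and once by upper key value, the margins of all legal distributions are summed to choose the split axis, and on that axis the distribution with the least overlap between the two resulting groups wins, with ties broken by total area. A minimal, self-contained sketch of the split-point choice, reduced to one dimension, is shown below; the Interval type and its helpers are illustrative stand-ins for the rec[] frames and keyValueProviders used in the real code (in 1-D, margin and area coincide, so a single tie-breaker covers both).

    import java.util.Arrays;
    import java.util.Comparator;

    public class RStarSplitSketch {

        // Stand-in for a per-group bounding rectangle, restricted to one axis.
        static final class Interval {
            final double lo, hi;
            Interval(double lo, double hi) { this.lo = lo; this.hi = hi; }
            double margin() { return hi - lo; }
            double overlap(Interval o) {
                return Math.max(0.0, Math.min(hi, o.hi) - Math.max(lo, o.lo));
            }
            static Interval enclose(Interval[] e, int from, int to) {
                double lo = Double.MAX_VALUE, hi = -Double.MAX_VALUE;
                for (int i = from; i < to; i++) {
                    lo = Math.min(lo, e[i].lo);
                    hi = Math.max(hi, e[i].hi);
                }
                return new Interval(lo, hi);
            }
        }

        // Entries are sorted by lower value; every legal distribution keeps at
        // least m entries on each side; the least-overlapping distribution wins,
        // ties broken by the smaller total margin (== area in one dimension).
        static int chooseSplitPoint(Interval[] entries, int m) {
            Arrays.sort(entries, Comparator.comparingDouble((Interval iv) -> iv.lo));
            int n = entries.length;
            int best = m;
            double minOverlap = Double.MAX_VALUE;
            double minMargin = Double.MAX_VALUE;
            for (int d = m; d <= n - m; d++) {
                Interval left = Interval.enclose(entries, 0, d);
                Interval right = Interval.enclose(entries, d, n);
                double overlap = left.overlap(right);
                double margin = left.margin() + right.margin();
                if (overlap < minOverlap || (overlap == minOverlap && margin < minMargin)) {
                    best = d;
                    minOverlap = overlap;
                    minMargin = margin;
                }
            }
            return best;
        }

        public static void main(String[] args) {
            Interval[] entries = {
                new Interval(0, 2), new Interval(1, 3), new Interval(8, 9), new Interval(9, 11)
            };
            // with m = 1 the clusters {0..3} and {8..11} land in separate groups
            System.out.println("split point = " + chooseSplitPoint(entries, 1));
        }
    }
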
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java
index 51a047e..e31148f 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java
@@ -15,29 +15,37 @@
 
 package edu.uci.ics.hyracks.storage.am.rtree.frames;
 
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
 
 public class RTreeNSMLeafFrameFactory implements ITreeIndexFrameFactory {
 
-	private static final long serialVersionUID = 1L;
-	private ITreeIndexTupleWriterFactory tupleWriterFactory;
-	private int keyFieldCount;
+    private static final long serialVersionUID = 1L;
+    private final ITreeIndexTupleWriterFactory tupleWriterFactory;
+    private final IPrimitiveValueProviderFactory[] keyValueProviderFactories;
 
-	public RTreeNSMLeafFrameFactory(
-			ITreeIndexTupleWriterFactory tupleWriterFactory, int keyFieldCount) {
-		this.tupleWriterFactory = tupleWriterFactory;
-		if (keyFieldCount % 2 != 0) {
-			throw new IllegalArgumentException(
-					"The key has different number of dimensions.");
-		}
-		this.keyFieldCount = keyFieldCount;
-	}
+    public RTreeNSMLeafFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory, IPrimitiveValueProviderFactory[] keyValueProviderFactories) {
+        this.tupleWriterFactory = tupleWriterFactory;
+        if (keyValueProviderFactories.length % 2 != 0) {
+            throw new IllegalArgumentException("The low key point and the high key point must have the same number of dimensions.");
+        }
+        this.keyValueProviderFactories = keyValueProviderFactories;
+    }
+
+    @Override
+    public IRTreeLeafFrame createFrame() {
+        IPrimitiveValueProvider[] keyValueProviders = new IPrimitiveValueProvider[keyValueProviderFactories.length];
+        for (int i = 0; i < keyValueProviders.length; i++) {
+            keyValueProviders[i] = keyValueProviderFactories[i].createPrimitiveValueProvider();
+        }
+        return new RTreeNSMLeafFrame(tupleWriterFactory.createTupleWriter(), keyValueProviders);
+    }
 
 	@Override
-	public IRTreeLeafFrame createFrame() {
-		return new RTreeNSMLeafFrame(tupleWriterFactory.createTupleWriter(),
-				keyFieldCount);
+	public ITreeIndexTupleWriterFactory getTupleWriterFactory() {
+		return tupleWriterFactory;
 	}
-}
\ No newline at end of file
+}
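
RTreeNSMLeafFrameFactory now receives one IPrimitiveValueProviderFactory per key field instead of a bare key-field count, and each created frame gets its own provider array for turning a field's raw bytes into the double coordinate the split and MBR code works with. The sketch below uses simplified stand-in interfaces (the real ones live in edu.uci.ics.hyracks.storage.am.common.api and may differ in detail) to show what a provider for keys stored as 8-byte big-endian doubles boils down to.

    import java.nio.ByteBuffer;

    // Simplified stand-ins for IPrimitiveValueProvider / IPrimitiveValueProviderFactory;
    // only the shape needed for this example, not the real Hyracks interfaces.
    interface ValueProviderSketch {
        double getValue(byte[] bytes, int offset);
    }

    interface ValueProviderFactorySketch {
        ValueProviderSketch createValueProvider();
    }

    public class DoubleValueProviderFactorySketch implements ValueProviderFactorySketch {
        @Override
        public ValueProviderSketch createValueProvider() {
            return new ValueProviderSketch() {
                @Override
                public double getValue(byte[] bytes, int offset) {
                    // decode an IEEE-754 double stored big-endian at the field's offset
                    return ByteBuffer.wrap(bytes).getDouble(offset);
                }
            };
        }

        public static void main(String[] args) {
            byte[] field = ByteBuffer.allocate(8).putDouble(42.5).array();
            ValueProviderSketch provider = new DoubleValueProviderFactorySketch().createValueProvider();
            System.out.println(provider.getValue(field, 0));   // prints 42.5
        }
    }
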
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/PathList.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/PathList.java
index d66d0a0..4f86111 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/PathList.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/PathList.java
@@ -16,15 +16,16 @@
 package edu.uci.ics.hyracks.storage.am.rtree.impls;
 
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.IntArrayList;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.LongArrayList;
 
 public class PathList {
 	private IntArrayList pageIds;
-	private IntArrayList pageLsns;
+	private LongArrayList pageLsns;
 	private IntArrayList pageIndexes;
 
 	public PathList(int initialCapacity, int growth) {
 		pageIds = new IntArrayList(initialCapacity, growth);
-		pageLsns = new IntArrayList(initialCapacity, growth);
+		pageLsns = new LongArrayList(initialCapacity, growth);
 		pageIndexes = new IntArrayList(initialCapacity, growth);
 	}
 
@@ -36,7 +37,7 @@
 		return pageIds.first();
 	}
 
-	public void add(int pageId, int pageLsn, int pageIndex) {
+	public void add(int pageId, long pageLsn, int pageIndex) {
 		pageIds.add(pageId);
 		pageLsns.add(pageLsn);
 		pageIndexes.add(pageIndex);
@@ -46,7 +47,7 @@
 		return pageIds.getFirst();
 	}
 
-	public int getFirstPageLsn() {
+	public long getFirstPageLsn() {
 		return pageLsns.getFirst();
 	}
 
@@ -58,7 +59,7 @@
 		return pageIds.getLast();
 	}
 
-	public int getLastPageLsn() {
+	public long getLastPageLsn() {
 		return pageLsns.getLast();
 	}
 
@@ -70,7 +71,7 @@
 		return pageIds.get(i);
 	}
 
-	public int getPageLsn(int i) {
+	public long getPageLsn(int i) {
 		return pageLsns.get(i);
 	}
 
@@ -78,7 +79,7 @@
 		return pageIndexes.get(i);
 	}
 
-	public void setPageLsn(int i, int pageLsn) {
+	public void setPageLsn(int i, long pageLsn) {
 		pageLsns.set(i, pageLsn);
 	}
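
PathList now keeps page LSNs in a LongArrayList instead of an IntArrayList, matching the RTree change below from AtomicInteger to AtomicLong for the global node sequence number; sequence numbers are bumped on every page update, and widening them to 64 bits avoids wraparound over the life of an index. A growable long array covering the operations PathList relies on is only a few lines; this is a sketch of the idea, not the LongArrayList class referenced here.

    // Minimal growable long array sketch (add/get/set/getFirst/getLast), illustrative only.
    public class LongArrayListSketch {
        private long[] data;
        private int size = 0;
        private final int growth;

        public LongArrayListSketch(int initialCapacity, int growth) {
            this.data = new long[initialCapacity];
            this.growth = growth;
        }

        public void add(long value) {
            if (size == data.length) {
                long[] bigger = new long[data.length + growth];
                System.arraycopy(data, 0, bigger, 0, data.length);
                data = bigger;
            }
            data[size++] = value;
        }

        public long get(int i) { return data[i]; }
        public void set(int i, long value) { data[i] = value; }
        public long getFirst() { return data[0]; }
        public long getLast() { return data[size - 1]; }
        public int size() { return size; }
    }
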
 
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java
index 03b2062..cb553f7 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java
@@ -16,7 +16,6 @@
 package edu.uci.ics.hyracks.storage.am.rtree.impls;
 
 import java.util.ArrayList;
-import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.locks.ReadWriteLock;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
@@ -26,17 +25,19 @@
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
 import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
 import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexOpContext;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
 import edu.uci.ics.hyracks.storage.am.common.api.IndexType;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
 import edu.uci.ics.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
 import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOpContext;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexUtils;
 import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeFrame;
 import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
 import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
@@ -47,972 +48,893 @@
 
 public class RTree implements ITreeIndex {
 
-	private boolean created = false;
-	private boolean loaded = false;
-	private final int rootPage = 1; // the root page never changes
+    private boolean created = false;
+    private boolean loaded = false;
+    private final int rootPage = 1; // the root page never changes
 
-	private final AtomicInteger globalNsn; // Global node sequence number
-	private int numOfPages = 1;
-	private final ReadWriteLock treeLatch;
+    private final AtomicLong globalNsn; // Global node sequence number
+    private int numOfPages = 1;
+    private final ReadWriteLock treeLatch;
 
-	private final IFreePageManager freePageManager;
-	private final IBufferCache bufferCache;
-	private int fileId;
+    private final IFreePageManager freePageManager;
+    private final IBufferCache bufferCache;
+    private int fileId;
 
-	private final SearchPredicate diskOrderScanPredicate;
-	private final ITreeIndexFrameFactory interiorFrameFactory;
-	private final ITreeIndexFrameFactory leafFrameFactory;
-	private final MultiComparator cmp;
+    private final SearchPredicate diskOrderScanPredicate;
+    private final ITreeIndexFrameFactory interiorFrameFactory;
+    private final ITreeIndexFrameFactory leafFrameFactory;
+    private final int fieldCount;
+    private final MultiComparator cmp;
 
-	public int rootSplits = 0;
-	public int[] splitsByLevel = new int[500];
-	public AtomicLong readLatchesAcquired = new AtomicLong();
-	public AtomicLong readLatchesReleased = new AtomicLong();
-	public AtomicLong writeLatchesAcquired = new AtomicLong();
-	public AtomicLong writeLatchesReleased = new AtomicLong();
-	public AtomicLong pins = new AtomicLong();
-	public AtomicLong unpins = new AtomicLong();
-	public byte currentLevel = 0;
+    public int rootSplits = 0;
+    public int[] splitsByLevel = new int[500];
+    public AtomicLong readLatchesAcquired = new AtomicLong();
+    public AtomicLong readLatchesReleased = new AtomicLong();
+    public AtomicLong writeLatchesAcquired = new AtomicLong();
+    public AtomicLong writeLatchesReleased = new AtomicLong();
+    public AtomicLong pins = new AtomicLong();
+    public AtomicLong unpins = new AtomicLong();
+    public byte currentLevel = 0;
 
-	public RTree(IBufferCache bufferCache, IFreePageManager freePageManager,
-			ITreeIndexFrameFactory interiorFrameFactory,
-			ITreeIndexFrameFactory leafFrameFactory, MultiComparator cmp) {
-		this.bufferCache = bufferCache;
-		this.freePageManager = freePageManager;
-		this.interiorFrameFactory = interiorFrameFactory;
-		this.leafFrameFactory = leafFrameFactory;
-		this.cmp = cmp;
-		globalNsn = new AtomicInteger();
-		this.treeLatch = new ReentrantReadWriteLock(true);
-		this.diskOrderScanPredicate = new SearchPredicate(null, cmp);
-	}
+    // TODO: is MultiComparator needed at all?
+    public RTree(IBufferCache bufferCache, int fieldCount, MultiComparator cmp, IFreePageManager freePageManager,
+            ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory leafFrameFactory) {
+        this.bufferCache = bufferCache;
+        this.fieldCount = fieldCount;
+        this.cmp = cmp;
+        this.freePageManager = freePageManager;
+        this.interiorFrameFactory = interiorFrameFactory;
+        this.leafFrameFactory = leafFrameFactory;
+        globalNsn = new AtomicLong();
+        this.treeLatch = new ReentrantReadWriteLock(true);
+        this.diskOrderScanPredicate = new SearchPredicate(null, cmp);
+    }
 
-	public void incrementGlobalNsn() {
-		globalNsn.incrementAndGet();
-	}
+    public void incrementGlobalNsn() {
+        globalNsn.incrementAndGet();
+    }
 
-	public int getGlobalNsn() {
-		return globalNsn.get();
-	}
+    public long getGlobalNsn() {
+        return globalNsn.get();
+    }
 
-	public void incrementReadLatchesAcquired() {
-		readLatchesAcquired.incrementAndGet();
-	}
+    public void incrementReadLatchesAcquired() {
+        readLatchesAcquired.incrementAndGet();
+    }
 
-	public void incrementReadLatchesReleased() {
-		readLatchesReleased.incrementAndGet();
-	}
+    public void incrementReadLatchesReleased() {
+        readLatchesReleased.incrementAndGet();
+    }
 
-	public void incrementWriteLatchesAcquired() {
-		writeLatchesAcquired.incrementAndGet();
-	}
+    public void incrementWriteLatchesAcquired() {
+        writeLatchesAcquired.incrementAndGet();
+    }
 
-	public void incrementWriteLatchesReleased() {
-		writeLatchesReleased.incrementAndGet();
-	}
+    public void incrementWriteLatchesReleased() {
+        writeLatchesReleased.incrementAndGet();
+    }
 
-	public void incrementPins() {
-		pins.incrementAndGet();
-	}
+    public void incrementPins() {
+        pins.incrementAndGet();
+    }
 
-	public void incrementUnpins() {
-		unpins.incrementAndGet();
-	}
+    public void incrementUnpins() {
+        unpins.incrementAndGet();
+    }
 
-	public String printStats() {
-		StringBuilder strBuilder = new StringBuilder();
-		strBuilder.append("\n");
-		strBuilder.append("ROOTSPLITS: " + rootSplits + "\n");
-		strBuilder.append("SPLITS BY LEVEL\n");
-		for (int i = 0; i < currentLevel; i++) {
-			strBuilder.append(String.format("%3d ", i)
-					+ String.format("%8d ", splitsByLevel[i]) + "\n");
-		}
-		strBuilder.append(String.format("READ LATCHES:  %10d %10d\n",
-				readLatchesAcquired.get(), readLatchesReleased.get()));
-		strBuilder.append(String.format("WRITE LATCHES: %10d %10d\n",
-				writeLatchesAcquired.get(), writeLatchesReleased.get()));
-		strBuilder.append(String.format("PINS:          %10d %10d\n",
-				pins.get(), unpins.get()));
+    public String printStats() {
+        StringBuilder strBuilder = new StringBuilder();
+        strBuilder.append("\n");
+        strBuilder.append("ROOTSPLITS: " + rootSplits + "\n");
+        strBuilder.append("SPLITS BY LEVEL\n");
+        for (int i = 0; i < currentLevel; i++) {
+            strBuilder.append(String.format("%3d ", i) + String.format("%8d ", splitsByLevel[i]) + "\n");
+        }
+        strBuilder.append(String.format("READ LATCHES:  %10d %10d\n", readLatchesAcquired.get(),
+                readLatchesReleased.get()));
+        strBuilder.append(String.format("WRITE LATCHES: %10d %10d\n", writeLatchesAcquired.get(),
+                writeLatchesReleased.get()));
+        strBuilder.append(String.format("PINS:          %10d %10d\n", pins.get(), unpins.get()));
 
-		strBuilder.append(String.format("Num of Pages:          %10d\n",
-				numOfPages));
+        strBuilder.append(String.format("Num of Pages:          %10d\n", numOfPages));
 
-		return strBuilder.toString();
-	}
+        return strBuilder.toString();
+    }
 
-	public void printTree(IRTreeFrame leafFrame, IRTreeFrame interiorFrame,
-			ISerializerDeserializer[] fields) throws Exception {
-		printTree(rootPage, null, false, leafFrame, interiorFrame, fields);
-	}
+    public void printTree(IRTreeFrame leafFrame, IRTreeFrame interiorFrame, ISerializerDeserializer[] keySerdes)
+            throws Exception {
+        printTree(rootPage, null, false, leafFrame, interiorFrame, keySerdes);
+    }
 
-	public void printTree(int pageId, ICachedPage parent, boolean unpin,
-			IRTreeFrame leafFrame, IRTreeFrame interiorFrame,
-			ISerializerDeserializer[] fields) throws Exception {
+    public void printTree(int pageId, ICachedPage parent, boolean unpin, IRTreeFrame leafFrame,
+            IRTreeFrame interiorFrame, ISerializerDeserializer[] keySerdes) throws Exception {
 
-		ICachedPage node = bufferCache.pin(
-				BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-		incrementPins();
-		node.acquireReadLatch();
-		incrementReadLatchesAcquired();
+        ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+        incrementPins();
+        node.acquireReadLatch();
+        incrementReadLatchesAcquired();
 
-		try {
-			if (parent != null && unpin == true) {
-				parent.releaseReadLatch();
-				incrementReadLatchesReleased();
-				bufferCache.unpin(parent);
-				incrementUnpins();
-			}
+        try {
+            if (parent != null && unpin == true) {
+                parent.releaseReadLatch();
+                incrementReadLatchesReleased();
+                bufferCache.unpin(parent);
+                incrementUnpins();
+            }
 
-			interiorFrame.setPage(node);
-			int level = interiorFrame.getLevel();
+            interiorFrame.setPage(node);
+            int level = interiorFrame.getLevel();
 
-			System.out.format("%1d ", level);
-			System.out.format("%3d ", pageId);
-			for (int i = 0; i < currentLevel - level; i++)
-				System.out.format("    ");
+            System.out.format("%1d ", level);
+            System.out.format("%3d ", pageId);
+            for (int i = 0; i < currentLevel - level; i++)
+                System.out.format("    ");
 
-			String keyString;
-			if (interiorFrame.isLeaf()) {
-				leafFrame.setPage(node);
-				keyString = leafFrame.printKeys(cmp, fields);
-			} else {
-				keyString = interiorFrame.printKeys(cmp, fields);
-			}
+            String keyString;
+            if (interiorFrame.isLeaf()) {
+                leafFrame.setPage(node);
+                keyString = TreeIndexUtils.printFrameTuples(leafFrame, keySerdes);
+            } else {
+                keyString = TreeIndexUtils.printFrameTuples(interiorFrame, keySerdes);
+            }
 
-			System.out.format(keyString);
-			if (!interiorFrame.isLeaf()) {
-				ArrayList<Integer> children = ((RTreeNSMFrame) (interiorFrame))
-						.getChildren(cmp);
-				for (int i = 0; i < children.size(); i++) {
-					printTree(children.get(i), node, i == children.size() - 1,
-							leafFrame, interiorFrame, fields);
-				}
-			} else {
-				node.releaseReadLatch();
-				incrementReadLatchesReleased();
-				bufferCache.unpin(node);
-				incrementUnpins();
-			}
-		} catch (Exception e) {
-			node.releaseReadLatch();
-			incrementReadLatchesReleased();
-			bufferCache.unpin(node);
-			incrementUnpins();
-			throw e;
-		}
-	}
+            System.out.format(keyString);
+            if (!interiorFrame.isLeaf()) {
+                ArrayList<Integer> children = ((RTreeNSMFrame) (interiorFrame)).getChildren(cmp);
+                for (int i = 0; i < children.size(); i++) {
+                    printTree(children.get(i), node, i == children.size() - 1, leafFrame, interiorFrame, keySerdes);
+                }
+            } else {
+                node.releaseReadLatch();
+                incrementReadLatchesReleased();
+                bufferCache.unpin(node);
+                incrementUnpins();
+            }
+        } catch (Exception e) {
+            node.releaseReadLatch();
+            incrementReadLatchesReleased();
+            bufferCache.unpin(node);
+            incrementUnpins();
+            throw e;
+        }
+    }
+
+    @Override
+    public void create(int fileId) throws HyracksDataException {
+        treeLatch.writeLock().lock();
+        try {
+            if (created) {
+                return;
+            }
+
+            ITreeIndexFrame leafFrame = leafFrameFactory.createFrame();
+            ITreeIndexMetaDataFrame metaFrame = freePageManager.getMetaDataFrameFactory().createFrame();
+            freePageManager.init(metaFrame, rootPage);
+
+            // initialize root page
+            ICachedPage rootNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rootPage), true);
+            incrementPins();
+
+            rootNode.acquireWriteLatch();
+            incrementWriteLatchesAcquired();
+            try {
+                leafFrame.setPage(rootNode);
+                leafFrame.initBuffer((byte) 0);
+            } finally {
+                rootNode.releaseWriteLatch();
+                incrementWriteLatchesReleased();
+                bufferCache.unpin(rootNode);
+                incrementUnpins();
+            }
+            currentLevel = 0;
+
+            created = true;
+        } finally {
+            treeLatch.writeLock().unlock();
+        }
+    }
+
+    public void open(int fileId) {
+        this.fileId = fileId;
+    }
+
+    public void close() {
+        fileId = -1;
+    }
 
 	@Override
-	public void create(int fileId, ITreeIndexFrame leafFrame,
-			ITreeIndexMetaDataFrame metaFrame) throws Exception {
-		if (created)
-			return;
-
-		treeLatch.writeLock().lock();
-		try {
-			// check if another thread beat us to it
-			if (created)
-				return;
-
-			freePageManager.init(metaFrame, rootPage);
-
-			// initialize root page
-			ICachedPage rootNode = bufferCache.pin(
-					BufferedFileHandle.getDiskPageId(fileId, rootPage), true);
-			incrementPins();
-
-			rootNode.acquireWriteLatch();
-			incrementWriteLatchesAcquired();
-			try {
-				leafFrame.setPage(rootNode);
-				leafFrame.initBuffer((byte) 0);
-			} finally {
-				rootNode.releaseWriteLatch();
-				incrementWriteLatchesReleased();
-				bufferCache.unpin(rootNode);
-				incrementUnpins();
-			}
-			currentLevel = 0;
-
-			created = true;
-		} finally {
-			treeLatch.writeLock().unlock();
-		}
+	public RTreeOpContext createOpContext(IndexOp op) {
+		return new RTreeOpContext(op,
+				(IRTreeLeafFrame) leafFrameFactory.createFrame(),
+				(IRTreeInteriorFrame) interiorFrameFactory.createFrame(),
+				freePageManager.getMetaDataFrameFactory().createFrame(), 8);
 	}
 
-	public void open(int fileId) {
-		this.fileId = fileId;
-	}
+    @Override
+    public void insert(ITupleReference tuple, IIndexOpContext ictx) throws HyracksDataException, TreeIndexException {
+        RTreeOpContext ctx = (RTreeOpContext) ictx;
+        ctx.reset();
+        ctx.setTuple(tuple);
+        ctx.splitKey.reset();
+        ctx.splitKey.getLeftTuple().setFieldCount(cmp.getKeyFieldCount());
+        ctx.splitKey.getRightTuple().setFieldCount(cmp.getKeyFieldCount());
 
-	public void close() {
-		fileId = -1;
-	}
+        int maxFieldPos = cmp.getKeyFieldCount() / 2;
+        for (int i = 0; i < maxFieldPos; i++) {
+            int j = maxFieldPos + i;
+            int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
+                    tuple.getFieldLength(i), tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
+            if (c > 0) {
+                throw new IllegalArgumentException("The low key point has larger coordinates than the high key point.");
+            }
+        }
 
-	@Override
-	public RTreeOpContext createOpContext(IndexOp op,
-			ITreeIndexFrame leafFrame, ITreeIndexFrame interiorFrame,
-			ITreeIndexMetaDataFrame metaFrame) {
-		return new RTreeOpContext(op, (IRTreeLeafFrame) leafFrame,
-				(IRTreeInteriorFrame) interiorFrame, metaFrame, 8);
-	}
+        ICachedPage leafNode = findLeaf(ctx);
 
-	@Override
-	public void insert(ITupleReference tuple, IndexOpContext ictx)
-			throws Exception {
-		RTreeOpContext ctx = (RTreeOpContext) ictx;
-		ctx.reset();
-		ctx.setTuple(tuple);
-		ctx.splitKey.reset();
-		ctx.splitKey.getLeftTuple().setFieldCount(cmp.getKeyFieldCount());
-		ctx.splitKey.getRightTuple().setFieldCount(cmp.getKeyFieldCount());
-		ctx.interiorFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
-		ctx.leafFrame.setPageTupleFieldCount(cmp.getFieldCount());
+        int pageId = ctx.pathList.getLastPageId();
+        ctx.pathList.moveLast();
+        insertTuple(leafNode, pageId, ctx.getTuple(), ctx, true);
 
-		int maxFieldPos = cmp.getKeyFieldCount() / 2;
-		for (int i = 0; i < maxFieldPos; i++) {
-			int j = maxFieldPos + i;
-			int c = cmp.getComparators()[i].compare(tuple.getFieldData(i),
-					tuple.getFieldStart(i), tuple.getFieldLength(i),
-					tuple.getFieldData(j), tuple.getFieldStart(j),
-					tuple.getFieldLength(j));
-			if (c > 0) {
-				throw new IllegalArgumentException(
-						"The low key point has larger coordinates than the high key point.");
-			}
-		}
+        while (true) {
+            if (ctx.splitKey.getLeftPageBuffer() != null) {
+                updateParentForInsert(ctx);
+            } else {
+                break;
+            }
+        }
 
-		ICachedPage leafNode = findLeaf(ctx);
+        leafNode.releaseWriteLatch();
+        incrementWriteLatchesReleased();
+        bufferCache.unpin(leafNode);
+        incrementUnpins();
+    }
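
An R-tree key tuple here carries the two corners of a d-dimensional box: fields 0..d-1 hold the minimum (low) coordinates and fields d..2d-1 the corresponding maximums, which is why insert() compares field i against field i + keyFieldCount/2 and rejects tuples whose low point exceeds the high point in any dimension. A tiny mirror of that validation on plain double arrays (a hypothetical helper, not part of the patch):

    // Hypothetical mirror of the low/high check in insert(): the first half of the
    // key holds the minimum coordinates, the second half the maximums.
    public final class MbrKeySketch {
        private MbrKeySketch() {}

        static void validate(double[] key) {
            int dim = key.length / 2;
            for (int i = 0; i < dim; i++) {
                if (key[i] > key[dim + i]) {
                    throw new IllegalArgumentException(
                            "The low key point has larger coordinates than the high key point.");
                }
            }
        }

        public static void main(String[] args) {
            validate(new double[] { 0.0, 0.0, 10.0, 5.0 });      // ok: (0,0) <= (10,5)
            try {
                validate(new double[] { 3.0, 7.0, 2.0, 9.0 });   // 3.0 > 2.0 in dimension 0
            } catch (IllegalArgumentException e) {
                System.out.println("rejected: " + e.getMessage());
            }
        }
    }
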
 
-		int pageId = ctx.pathList.getLastPageId();
-		ctx.pathList.moveLast();
-		insertTuple(leafNode, pageId, ctx.getTuple(), ctx, true);
+    public ICachedPage findLeaf(RTreeOpContext ctx) throws HyracksDataException {
+        int pageId = rootPage;
+        boolean writeLatched = false;
+        ICachedPage node = null;
+        boolean isLeaf = false;
+        long pageLsn = 0, parentLsn = 0;
 
-		while (true) {
-			if (ctx.splitKey.getLeftPageBuffer() != null) {
-				updateParentForInsert(ctx);
-			} else {
-				break;
-			}
-		}
+        while (true) {
+            if (!writeLatched) {
+                node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+                incrementPins();
+                ctx.interiorFrame.setPage(node);
+                isLeaf = ctx.interiorFrame.isLeaf();
+                if (isLeaf) {
+                    node.acquireWriteLatch();
+                    incrementWriteLatchesAcquired();
+                    writeLatched = true;
 
-		leafNode.releaseWriteLatch();
-		incrementWriteLatchesReleased();
-		bufferCache.unpin(leafNode);
-		incrementUnpins();
-	}
+                    if (!ctx.interiorFrame.isLeaf()) {
+                        node.releaseWriteLatch();
+                        incrementWriteLatchesReleased();
+                        bufferCache.unpin(node);
+                        incrementUnpins();
+                        writeLatched = false;
+                        continue;
+                    }
+                } else {
+                    // Be optimistic and grab read latch first. We will swap it
+                    // to write latch if we need to enlarge the best child
+                    // tuple.
+                    node.acquireReadLatch();
+                    incrementReadLatchesAcquired();
+                }
+            }
 
-	public ICachedPage findLeaf(RTreeOpContext ctx) throws Exception {
-		int pageId = rootPage;
-		boolean writeLatched = false;
-		ICachedPage node = null;
-		boolean isLeaf = false;
-		int pageLsn = 0, parentLsn = 0;
+            if (pageId != rootPage && parentLsn < ctx.interiorFrame.getPageNsn()) {
+                // Concurrent split detected, go back to parent and re-choose
+                // the best child
+                if (writeLatched) {
+                    node.releaseWriteLatch();
+                    incrementWriteLatchesReleased();
+                    bufferCache.unpin(node);
+                    incrementUnpins();
+                    writeLatched = false;
+                } else {
+                    node.releaseReadLatch();
+                    incrementReadLatchesReleased();
+                    bufferCache.unpin(node);
+                    incrementUnpins();
+                }
 
-		while (true) {
-			if (!writeLatched) {
-				node = bufferCache
-						.pin(BufferedFileHandle.getDiskPageId(fileId, pageId),
-								false);
-				incrementPins();
-				ctx.interiorFrame.setPage(node);
-				isLeaf = ctx.interiorFrame.isLeaf();
-				if (isLeaf) {
-					node.acquireWriteLatch();
-					incrementWriteLatchesAcquired();
-					writeLatched = true;
+                pageId = ctx.pathList.getLastPageId();
+                if (pageId != rootPage) {
+                    parentLsn = ctx.pathList.getPageLsn(ctx.pathList.size() - 2);
+                }
+                ctx.pathList.moveLast();
+                continue;
+            }
 
-					if (!ctx.interiorFrame.isLeaf()) {
-						node.releaseWriteLatch();
-						incrementWriteLatchesReleased();
-						bufferCache.unpin(node);
-						incrementUnpins();
-						writeLatched = false;
-						continue;
-					}
-				} else {
-					// Be optimistic and grab read latch first. We will swap it
-					// to write latch if we need to enlarge the best child
-					// tuple.
-					node.acquireReadLatch();
-					incrementReadLatchesAcquired();
-				}
-			}
+            pageLsn = ctx.interiorFrame.getPageLsn();
+            ctx.pathList.add(pageId, pageLsn, -1);
 
-			if (pageId != rootPage
-					&& parentLsn < ctx.interiorFrame.getPageNsn()) {
-				// Concurrent split detected, go back to parent and re-choose
-				// the best child
-				if (writeLatched) {
-					node.releaseWriteLatch();
-					incrementWriteLatchesReleased();
-					bufferCache.unpin(node);
-					incrementUnpins();
-					writeLatched = false;
-				} else {
-					node.releaseReadLatch();
-					incrementReadLatchesReleased();
-					bufferCache.unpin(node);
-					incrementUnpins();
-				}
+            if (!isLeaf) {
+                // findBestChild must be called *before* getBestChildPageId
+                ctx.interiorFrame.findBestChild(ctx.getTuple(), cmp);
+                int childPageId = ctx.interiorFrame.getBestChildPageId();
 
-				pageId = ctx.pathList.getLastPageId();
-				if (pageId != rootPage) {
-					parentLsn = ctx.pathList
-							.getPageLsn(ctx.pathList.size() - 2);
-				}
-				ctx.pathList.moveLast();
-				continue;
-			}
+                if (!writeLatched) {
+                    node.releaseReadLatch();
+                    incrementReadLatchesReleased();
+                    // TODO: do we need to un-pin and pin again?
+                    bufferCache.unpin(node);
+                    incrementUnpins();
 
-			pageLsn = ctx.interiorFrame.getPageLsn();
-			ctx.pathList.add(pageId, pageLsn, -1);
+                    node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+                    incrementPins();
+                    node.acquireWriteLatch();
+                    incrementWriteLatchesAcquired();
+                    ctx.interiorFrame.setPage(node);
+                    writeLatched = true;
 
-			if (!isLeaf) {
-				// findBestChild must be called *before* getBestChildPageId
-				ctx.interiorFrame.findBestChild(ctx.getTuple(), cmp);
-				int childPageId = ctx.interiorFrame.getBestChildPageId(cmp);
+                    if (ctx.interiorFrame.getPageLsn() != pageLsn) {
+                        // The page was changed while we unlocked it; thus,
+                        // retry (re-choose best child)
 
-				if (!writeLatched) {
-					node.releaseReadLatch();
-					incrementReadLatchesReleased();
-					// TODO: do we need to un-pin and pin again?
-					bufferCache.unpin(node);
-					incrementUnpins();
+                        ctx.pathList.moveLast();
+                        continue;
+                    }
+                }
 
-					node = bufferCache.pin(
-							BufferedFileHandle.getDiskPageId(fileId, pageId),
-							false);
-					incrementPins();
-					node.acquireWriteLatch();
-					incrementWriteLatchesAcquired();
-					ctx.interiorFrame.setPage(node);
-					writeLatched = true;
+                // We don't need to reset the frameTuple because it is
+                // already pointing to the best child
+                ctx.interiorFrame.enlarge(ctx.getTuple(), cmp);
 
-					if (ctx.interiorFrame.getPageLsn() != pageLsn) {
-						// The page was changed while we unlocked it; thus,
-						// retry (re-choose best child)
+                node.releaseWriteLatch();
+                incrementWriteLatchesReleased();
+                bufferCache.unpin(node);
+                incrementUnpins();
+                writeLatched = false;
 
-						ctx.pathList.moveLast();
-						continue;
-					}
-				}
+                pageId = childPageId;
+                parentLsn = pageLsn;
+            } else {
+                ctx.leafFrame.setPage(node);
+                return node;
+            }
+        }
+    }
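
findLeaf descends with read latches only and relies on node sequence numbers to notice concurrent structure changes: whenever a page splits it is stamped with a fresh NSN, so a child whose NSN is newer than the LSN its parent carried when the child pointer was read may have pushed entries to a right sibling, and the descent backs up one level to re-choose the best child. Likewise, after upgrading the latch on an interior page, a changed page LSN invalidates the previously chosen child. The helpers below only restate those two checks; they are hypothetical and not part of the patch.

    // Hypothetical restatements of the LSN/NSN checks in findLeaf.
    public final class LsnCheckSketch {
        private LsnCheckSketch() {}

        // parentLsn: LSN the parent had when its child pointer was followed.
        // childNsn:  the child's node sequence number, refreshed on every split.
        static boolean childSplitAfterPointerWasRead(long parentLsn, long childNsn) {
            return parentLsn < childNsn;
        }

        // After swapping a read latch for a write latch, a changed page LSN means
        // the cached best-child choice may be stale and must be recomputed.
        static boolean pageChangedWhileUnlatched(long lsnBeforeUpgrade, long lsnAfterUpgrade) {
            return lsnBeforeUpgrade != lsnAfterUpgrade;
        }
    }
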
 
-				// We don't need to reset the frameTuple because it is
-				// already pointing to the best child
-				ctx.interiorFrame.enlarge(ctx.getTuple(), cmp);
+    private void insertTuple(ICachedPage node, int pageId, ITupleReference tuple, RTreeOpContext ctx, boolean isLeaf)
+            throws HyracksDataException, TreeIndexException {
+        FrameOpSpaceStatus spaceStatus;
+        if (!isLeaf) {
+            spaceStatus = ctx.interiorFrame.hasSpaceInsert(tuple);
+        } else {
+            spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple);
+        }
 
-				node.releaseWriteLatch();
-				incrementWriteLatchesReleased();
-				bufferCache.unpin(node);
-				incrementUnpins();
-				writeLatched = false;
+        switch (spaceStatus) {
+            case SUFFICIENT_CONTIGUOUS_SPACE: {
+                if (!isLeaf) {
+                    ctx.interiorFrame.insert(tuple, -1);
+                    incrementGlobalNsn();
+                    ctx.interiorFrame.setPageLsn(getGlobalNsn());
+                } else {
+                    ctx.leafFrame.insert(tuple, -1);
+                    incrementGlobalNsn();
+                    ctx.leafFrame.setPageLsn(getGlobalNsn());
+                }
+                ctx.splitKey.reset();
+                break;
+            }
 
-				pageId = childPageId;
-				parentLsn = pageLsn;
-			} else {
-				ctx.leafFrame.setPage(node);
-				return node;
-			}
-		}
-	}
+            case SUFFICIENT_SPACE: {
+                if (!isLeaf) {
+                    ctx.interiorFrame.compact();
+                    ctx.interiorFrame.insert(tuple, -1);
+                    incrementGlobalNsn();
+                    ctx.interiorFrame.setPageLsn(getGlobalNsn());
+                } else {
+                    ctx.leafFrame.compact();
+                    ctx.leafFrame.insert(tuple, -1);
+                    incrementGlobalNsn();
+                    ctx.leafFrame.setPageLsn(getGlobalNsn());
+                }
+                ctx.splitKey.reset();
+                break;
+            }
 
-	private void insertTuple(ICachedPage node, int pageId,
-			ITupleReference tuple, RTreeOpContext ctx, boolean isLeaf)
-			throws Exception {
-		FrameOpSpaceStatus spaceStatus;
-		if (!isLeaf) {
-			spaceStatus = ctx.interiorFrame.hasSpaceInsert(tuple, cmp);
-		} else {
-			spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple, cmp);
-		}
+            case INSUFFICIENT_SPACE: {
+                int rightPageId = freePageManager.getFreePage(ctx.metaFrame);
+                ICachedPage rightNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rightPageId), true);
+                incrementPins();
+                rightNode.acquireWriteLatch();
+                incrementWriteLatchesAcquired();
 
-		switch (spaceStatus) {
-		case SUFFICIENT_CONTIGUOUS_SPACE: {
-			if (!isLeaf) {
-				ctx.interiorFrame.insert(tuple, cmp, -1);
-				incrementGlobalNsn();
-				ctx.interiorFrame.setPageLsn(getGlobalNsn());
-			} else {
-				ctx.leafFrame.insert(tuple, cmp, -1);
-				incrementGlobalNsn();
-				ctx.leafFrame.setPageLsn(getGlobalNsn());
-			}
-			ctx.splitKey.reset();
-			break;
-		}
+                try {
+                    IRTreeFrame rightFrame;
+                    numOfPages++; // debug
+                    if (!isLeaf) {
+                        splitsByLevel[ctx.interiorFrame.getLevel()]++; // debug
+                        rightFrame = (IRTreeFrame) interiorFrameFactory.createFrame();
+                        rightFrame.setPage(rightNode);
+                        rightFrame.initBuffer((byte) ctx.interiorFrame.getLevel());
+                        ctx.interiorFrame.split(rightFrame, tuple, ctx.splitKey);
+                        ctx.interiorFrame.setRightPage(rightPageId);
+                        rightFrame.setPageNsn(ctx.interiorFrame.getPageNsn());
+                        incrementGlobalNsn();
+                        long newNsn = getGlobalNsn();
+                        rightFrame.setPageLsn(newNsn);
+                        ctx.interiorFrame.setPageNsn(newNsn);
+                        ctx.interiorFrame.setPageLsn(newNsn);
+                    } else {
+                        splitsByLevel[0]++; // debug
+                        rightFrame = (IRTreeFrame) leafFrameFactory.createFrame();
+                        rightFrame.setPage(rightNode);
+                        rightFrame.initBuffer((byte) 0);
+                        ctx.leafFrame.split(rightFrame, tuple, ctx.splitKey);
+                        ctx.leafFrame.setRightPage(rightPageId);
+                        rightFrame.setPageNsn(ctx.leafFrame.getPageNsn());
+                        incrementGlobalNsn();
+                        long newNsn = getGlobalNsn();
+                        rightFrame.setPageLsn(newNsn);
+                        ctx.leafFrame.setPageNsn(newNsn);
+                        ctx.leafFrame.setPageLsn(newNsn);
+                    }
+                    ctx.splitKey.setPages(pageId, rightPageId);
+                    if (pageId == rootPage) {
+                        rootSplits++; // debug
+                        splitsByLevel[currentLevel]++;
+                        currentLevel++;
 
-		case SUFFICIENT_SPACE: {
-			if (!isLeaf) {
-				ctx.interiorFrame.compact(cmp);
-				ctx.interiorFrame.insert(tuple, cmp, -1);
-				incrementGlobalNsn();
-				ctx.interiorFrame.setPageLsn(getGlobalNsn());
-			} else {
-				ctx.leafFrame.compact(cmp);
-				ctx.leafFrame.insert(tuple, cmp, -1);
-				incrementGlobalNsn();
-				ctx.leafFrame.setPageLsn(getGlobalNsn());
-			}
-			ctx.splitKey.reset();
-			break;
-		}
+                        int newLeftId = freePageManager.getFreePage(ctx.metaFrame);
+                        ICachedPage newLeftNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, newLeftId),
+                                true);
+                        incrementPins();
+                        newLeftNode.acquireWriteLatch();
+                        incrementWriteLatchesAcquired();
+                        try {
+                            // copy left child to new left child
+                            System.arraycopy(node.getBuffer().array(), 0, newLeftNode.getBuffer().array(), 0,
+                                    newLeftNode.getBuffer().capacity());
 
-		case INSUFFICIENT_SPACE: {
-			int rightPageId = freePageManager.getFreePage(ctx.metaFrame);
-			ICachedPage rightNode = bufferCache
-					.pin(BufferedFileHandle.getDiskPageId(fileId, rightPageId),
-							true);
-			incrementPins();
-			rightNode.acquireWriteLatch();
-			incrementWriteLatchesAcquired();
+                            // initialize new root (leftNode becomes new root)
+                            ctx.interiorFrame.setPage(node);
+                            ctx.interiorFrame.initBuffer((byte) (ctx.interiorFrame.getLevel() + 1));
 
-			try {
-				IRTreeFrame rightFrame;
-				int ret;
-				numOfPages++; // debug
-				if (!isLeaf) {
-					splitsByLevel[ctx.interiorFrame.getLevel()]++; // debug
-					rightFrame = (IRTreeFrame) interiorFrameFactory
-							.createFrame();
-					rightFrame.setPage(rightNode);
-					rightFrame.initBuffer((byte) ctx.interiorFrame.getLevel());
-					rightFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
-					ret = ctx.interiorFrame.split(rightFrame, tuple, cmp,
-							ctx.splitKey);
-					ctx.interiorFrame.setRightPage(rightPageId);
-					rightFrame.setPageNsn(ctx.interiorFrame.getPageNsn());
-					incrementGlobalNsn();
-					int newNsn = getGlobalNsn();
-					rightFrame.setPageLsn(newNsn);
-					ctx.interiorFrame.setPageNsn(newNsn);
-					ctx.interiorFrame.setPageLsn(newNsn);
-				} else {
-					splitsByLevel[0]++; // debug
-					rightFrame = (IRTreeFrame) leafFrameFactory.createFrame();
-					rightFrame.setPage(rightNode);
-					rightFrame.initBuffer((byte) 0);
-					rightFrame.setPageTupleFieldCount(cmp.getFieldCount());
-					ret = ctx.leafFrame.split(rightFrame, tuple, cmp,
-							ctx.splitKey);
-					ctx.leafFrame.setRightPage(rightPageId);
-					rightFrame.setPageNsn(ctx.leafFrame.getPageNsn());
-					incrementGlobalNsn();
-					int newNsn = getGlobalNsn();
-					rightFrame.setPageLsn(newNsn);
-					ctx.leafFrame.setPageNsn(newNsn);
-					ctx.leafFrame.setPageLsn(newNsn);
-				}
-				if (ret != 0) {
-					ctx.splitKey.reset();
-				} else {
-					ctx.splitKey.setPages(pageId, rightPageId);
-				}
-				if (pageId == rootPage) {
-					rootSplits++; // debug
-					splitsByLevel[currentLevel]++;
-					currentLevel++;
+                            ctx.splitKey.setLeftPage(newLeftId);
 
-					int newLeftId = freePageManager.getFreePage(ctx.metaFrame);
-					ICachedPage newLeftNode = bufferCache
-							.pin(BufferedFileHandle.getDiskPageId(fileId,
-									newLeftId), true);
-					incrementPins();
-					newLeftNode.acquireWriteLatch();
-					incrementWriteLatchesAcquired();
-					try {
-						// copy left child to new left child
-						System.arraycopy(node.getBuffer().array(), 0,
-								newLeftNode.getBuffer().array(), 0, newLeftNode
-										.getBuffer().capacity());
+                            ctx.interiorFrame.insert(ctx.splitKey.getLeftTuple(), -1);
+                            ctx.interiorFrame.insert(ctx.splitKey.getRightTuple(), -1);
 
-						// initialize new root (leftNode becomes new root)
-						ctx.interiorFrame.setPage(node);
-						ctx.interiorFrame.initBuffer((byte) (ctx.interiorFrame
-								.getLevel() + 1));
+                            incrementGlobalNsn();
+                            long newNsn = getGlobalNsn();
+                            ctx.interiorFrame.setPageLsn(newNsn);
+                            ctx.interiorFrame.setPageNsn(newNsn);
+                        } finally {
+                            newLeftNode.releaseWriteLatch();
+                            incrementWriteLatchesReleased();
+                            bufferCache.unpin(newLeftNode);
+                            incrementUnpins();
+                        }
 
-						ctx.splitKey.setLeftPage(newLeftId);
+                        ctx.splitKey.reset();
+                    }
+                } finally {
+                    rightNode.releaseWriteLatch();
+                    incrementWriteLatchesReleased();
+                    bufferCache.unpin(rightNode);
+                    incrementUnpins();
+                }
+                break;
+            }
+        }
+    }
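
insertTuple dispatches on FrameOpSpaceStatus: enough contiguous free space lets the tuple be appended directly, enough total-but-fragmented space means the frame is compacted first, and anything else forces a page split, including the in-place root split above that keeps page 1 as the root by copying its old contents into a freshly allocated left child. The classification itself reduces to two byte-count comparisons; the helper below is a hypothetical restatement (the exact thresholds live in each frame's hasSpaceInsert).

    // Hypothetical restatement of the space check driving insertTuple(); byte
    // counts are assumed to include the slot entry as well as the tuple itself.
    public final class SpaceStatusSketch {
        private SpaceStatusSketch() {}

        enum Status { SUFFICIENT_CONTIGUOUS_SPACE, SUFFICIENT_SPACE, INSUFFICIENT_SPACE }

        static Status classify(int bytesNeeded, int contiguousFreeBytes, int totalFreeBytes) {
            if (bytesNeeded <= contiguousFreeBytes) {
                return Status.SUFFICIENT_CONTIGUOUS_SPACE;   // append directly
            }
            if (bytesNeeded <= totalFreeBytes) {
                return Status.SUFFICIENT_SPACE;              // compact(), then append
            }
            return Status.INSUFFICIENT_SPACE;                // split the page
        }
    }
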
 
-						ctx.interiorFrame.insert(ctx.splitKey.getLeftTuple(),
-								cmp, -1);
-						ctx.interiorFrame.insert(ctx.splitKey.getRightTuple(),
-								cmp, -1);
+    public void updateParentForInsert(RTreeOpContext ctx) throws HyracksDataException, TreeIndexException {
+        int parentId = ctx.pathList.getLastPageId();
+        ICachedPage parentNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, parentId), false);
+        incrementPins();
+        parentNode.acquireWriteLatch();
+        incrementWriteLatchesAcquired();
+        ctx.interiorFrame.setPage(parentNode);
+        boolean foundParent = true;
 
-						incrementGlobalNsn();
-						int newNsn = getGlobalNsn();
-						ctx.interiorFrame.setPageLsn(newNsn);
-						ctx.interiorFrame.setPageNsn(newNsn);
-					} finally {
-						newLeftNode.releaseWriteLatch();
-						incrementWriteLatchesReleased();
-						bufferCache.unpin(newLeftNode);
-						incrementUnpins();
-					}
+        if (ctx.interiorFrame.getPageLsn() != ctx.pathList.getLastPageLsn()) {
+            foundParent = false;
+            while (true) {
+                if (ctx.interiorFrame.findTupleByPointer(ctx.splitKey.getLeftTuple(), cmp) != -1) {
+                    // found the parent
+                    foundParent = true;
+                    break;
+                }
+                int rightPage = ctx.interiorFrame.getRightPage();
+                parentNode.releaseWriteLatch();
+                incrementWriteLatchesReleased();
+                bufferCache.unpin(parentNode);
+                incrementUnpins();
 
-					ctx.splitKey.reset();
-				}
-			} finally {
-				rightNode.releaseWriteLatch();
-				incrementWriteLatchesReleased();
-				bufferCache.unpin(rightNode);
-				incrementUnpins();
-			}
-			break;
-		}
-		}
-	}
+                if (rightPage == -1) {
+                    break;
+                }
 
-	public void updateParentForInsert(RTreeOpContext ctx) throws Exception {
-		int parentId = ctx.pathList.getLastPageId();
-		ICachedPage parentNode = bufferCache.pin(
-				BufferedFileHandle.getDiskPageId(fileId, parentId), false);
-		incrementPins();
-		parentNode.acquireWriteLatch();
-		incrementWriteLatchesAcquired();
-		ctx.interiorFrame.setPage(parentNode);
-		boolean foundParent = true;
+                parentId = rightPage;
+                parentNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, parentId), false);
+                incrementPins();
+                parentNode.acquireWriteLatch();
+                incrementWriteLatchesAcquired();
+                ctx.interiorFrame.setPage(parentNode);
+            }
+        }
+        if (foundParent) {
+            ctx.interiorFrame.adjustKey(ctx.splitKey.getLeftTuple(), -1, cmp);
+            insertTuple(parentNode, parentId, ctx.splitKey.getRightTuple(), ctx, ctx.interiorFrame.isLeaf());
+            ctx.pathList.moveLast();
 
-		if (ctx.interiorFrame.getPageLsn() != ctx.pathList.getLastPageLsn()) {
-			foundParent = false;
-			while (true) {
-				if (ctx.interiorFrame.findTupleByPointer(
-						ctx.splitKey.getLeftTuple(), cmp) != -1) {
-					// found the parent
-					foundParent = true;
-					break;
-				}
-				int rightPage = ctx.interiorFrame.getRightPage();
-				parentNode.releaseWriteLatch();
-				incrementWriteLatchesReleased();
-				bufferCache.unpin(parentNode);
-				incrementUnpins();
+            parentNode.releaseWriteLatch();
+            incrementWriteLatchesReleased();
+            bufferCache.unpin(parentNode);
+            incrementUnpins();
+            return;
+        }
 
-				if (rightPage == -1) {
-					break;
-				}
+        // very rare situation when there is a root split; do an exhaustive
+        // breadth-first traversal looking for the parent tuple
 
-				parentId = rightPage;
-				parentNode = bufferCache.pin(
-						BufferedFileHandle.getDiskPageId(fileId, parentId),
-						false);
-				incrementPins();
-				parentNode.acquireWriteLatch();
-				incrementWriteLatchesAcquired();
-				ctx.interiorFrame.setPage(parentNode);
-			}
-		}
-		if (foundParent) {
-			ctx.interiorFrame.adjustKey(ctx.splitKey.getLeftTuple(), -1, cmp);
-			insertTuple(parentNode, parentId, ctx.splitKey.getRightTuple(),
-					ctx, ctx.interiorFrame.isLeaf());
-			ctx.pathList.moveLast();
+        ctx.pathList.clear();
+        ctx.traverseList.clear();
+        findPath(ctx);
+        updateParentForInsert(ctx);
+    }
 
-			parentNode.releaseWriteLatch();
-			incrementWriteLatchesReleased();
-			bufferCache.unpin(parentNode);
-			incrementUnpins();
-			return;
-		}
+    public void findPath(RTreeOpContext ctx) throws HyracksDataException {
+        int pageId = rootPage;
+        int parentIndex = -1;
+        long parentLsn = 0;
+        long pageLsn;
+        int pageIndex;
+        ctx.traverseList.add(pageId, -1, parentIndex);
+        while (!ctx.traverseList.isLast()) {
+            pageId = ctx.traverseList.getFirstPageId();
+            parentIndex = ctx.traverseList.getFirstPageIndex();
 
-		// very rare situation when the there is a root split, do an exhaustive
-		// breadth-first traversal looking for the parent tuple
-
-		ctx.pathList.clear();
-		ctx.traverseList.clear();
-		findPath(ctx);
-		updateParentForInsert(ctx);
-	}
-
-	public void findPath(RTreeOpContext ctx) throws Exception {
-		int pageId = rootPage;
-		int parentIndex = -1;
-		int parentLsn = 0;
-		int pageLsn, pageIndex;
-		ctx.traverseList.add(pageId, -1, parentIndex);
-		while (!ctx.traverseList.isLast()) {
-			pageId = ctx.traverseList.getFirstPageId();
-			parentIndex = ctx.traverseList.getFirstPageIndex();
-
-			ICachedPage node = bufferCache.pin(
-					BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-			incrementPins();
-			node.acquireReadLatch();
-			incrementReadLatchesAcquired();
-			ctx.interiorFrame.setPage(node);
-			pageLsn = ctx.interiorFrame.getPageLsn();
-			pageIndex = ctx.traverseList.first();
-			ctx.traverseList.setPageLsn(pageIndex, pageLsn);
-
-			ctx.traverseList.moveFirst();
+            ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+            incrementPins();
+            node.acquireReadLatch();
+            incrementReadLatchesAcquired();
+            ctx.interiorFrame.setPage(node);
+            pageLsn = ctx.interiorFrame.getPageLsn();
+            pageIndex = ctx.traverseList.first();
+            ctx.traverseList.setPageLsn(pageIndex, pageLsn);
 
-			if (pageId != rootPage
-					&& parentLsn < ctx.interiorFrame.getPageNsn()) {
-				int rightPage = ctx.interiorFrame.getRightPage();
-				if (rightPage != -1) {
-					ctx.traverseList.add(rightPage, -1, parentIndex);
-				}
-			}
-			parentLsn = pageLsn;
+            ctx.traverseList.moveFirst();
 
-			if (ctx.interiorFrame.findTupleByPointer(
-					ctx.splitKey.getLeftTuple(), ctx.traverseList, pageIndex,
-					cmp) != -1) {
-				fillPath(ctx, pageIndex);
+            if (pageId != rootPage && parentLsn < ctx.interiorFrame.getPageNsn()) {
+                int rightPage = ctx.interiorFrame.getRightPage();
+                if (rightPage != -1) {
+                    ctx.traverseList.add(rightPage, -1, parentIndex);
+                }
+            }
+            parentLsn = pageLsn;
 
-				node.releaseReadLatch();
-				incrementReadLatchesReleased();
-				bufferCache.unpin(node);
-				incrementUnpins();
-				return;
-			}
-			node.releaseReadLatch();
-			incrementReadLatchesReleased();
-			bufferCache.unpin(node);
-			incrementUnpins();
-		}
-	}
+            if (ctx.interiorFrame.findTupleByPointer(ctx.splitKey.getLeftTuple(), ctx.traverseList, pageIndex, cmp) != -1) {
+                fillPath(ctx, pageIndex);
 
-	public void fillPath(RTreeOpContext ctx, int pageIndex) throws Exception {
-		if (pageIndex != -1) {
-			fillPath(ctx, ctx.traverseList.getPageIndex(pageIndex));
-			ctx.pathList.add(ctx.traverseList.getPageId(pageIndex),
-					ctx.traverseList.getPageLsn(pageIndex), -1);
-		}
-	}
+                node.releaseReadLatch();
+                incrementReadLatchesReleased();
+                bufferCache.unpin(node);
+                incrementUnpins();
+                return;
+            }
+            node.releaseReadLatch();
+            incrementReadLatchesReleased();
+            bufferCache.unpin(node);
+            incrementUnpins();
+        }
+    }
 
-	@Override
-	public void delete(ITupleReference tuple, IndexOpContext ictx)
-			throws Exception {
-		RTreeOpContext ctx = (RTreeOpContext) ictx;
-		ctx.reset();
-		ctx.setTuple(tuple);
-		ctx.splitKey.reset();
-		ctx.splitKey.getLeftTuple().setFieldCount(cmp.getKeyFieldCount());
-		ctx.interiorFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
-		ctx.leafFrame.setPageTupleFieldCount(cmp.getFieldCount());
+    public void fillPath(RTreeOpContext ctx, int pageIndex) {
+        if (pageIndex != -1) {
+            fillPath(ctx, ctx.traverseList.getPageIndex(pageIndex));
+            ctx.pathList.add(ctx.traverseList.getPageId(pageIndex), ctx.traverseList.getPageLsn(pageIndex), -1);
+        }
+    }
 
-		int tupleIndex = findTupleToDelete(ctx);
+    @Override
+    public void delete(ITupleReference tuple, IIndexOpContext ictx) throws HyracksDataException, TreeIndexException {
+        RTreeOpContext ctx = (RTreeOpContext) ictx;
+        ctx.reset();
+        ctx.setTuple(tuple);
+        ctx.splitKey.reset();
+        ctx.splitKey.getLeftTuple().setFieldCount(cmp.getKeyFieldCount());
 
-		if (tupleIndex != -1) {
-			int pageId = ctx.pathList.getLastPageId();
-			ctx.pathList.moveLast();
-			deleteTuple(pageId, tupleIndex, ctx);
+        int tupleIndex = findTupleToDelete(ctx);
 
-			while (true) {
-				if (ctx.splitKey.getLeftPageBuffer() != null) {
-					updateParentForDelete(ctx);
-				} else {
-					break;
-				}
-			}
+        if (tupleIndex != -1) {
+            int pageId = ctx.pathList.getLastPageId();
+            ctx.pathList.moveLast();
+            deleteTuple(pageId, tupleIndex, ctx);
 
-			ctx.leafFrame.getPage().releaseWriteLatch();
-			incrementWriteLatchesReleased();
-			bufferCache.unpin(ctx.leafFrame.getPage());
-			incrementUnpins();
-		}
-	}
+            while (true) {
+                if (ctx.splitKey.getLeftPageBuffer() != null) {
+                    updateParentForDelete(ctx);
+                } else {
+                    break;
+                }
+            }
 
-	public void updateParentForDelete(RTreeOpContext ctx) throws Exception {
-		int parentId = ctx.pathList.getLastPageId();
-		ICachedPage parentNode = bufferCache.pin(
-				BufferedFileHandle.getDiskPageId(fileId, parentId), false);
-		incrementPins();
-		parentNode.acquireWriteLatch();
-		incrementWriteLatchesAcquired();
-		ctx.interiorFrame.setPage(parentNode);
-		boolean foundParent = true;
-		int tupleIndex = -1;
+            ctx.leafFrame.getPage().releaseWriteLatch();
+            incrementWriteLatchesReleased();
+            bufferCache.unpin(ctx.leafFrame.getPage());
+            incrementUnpins();
+        }
+    }
 
-		if (ctx.interiorFrame.getPageLsn() != ctx.pathList.getLastPageLsn()) {
-			foundParent = false;
-			while (true) {
-				tupleIndex = ctx.interiorFrame.findTupleByPointer(
-						ctx.splitKey.getLeftTuple(), cmp);
-				if (tupleIndex != -1) {
-					// found the parent
-					foundParent = true;
-					break;
-				}
-				int rightPage = ctx.interiorFrame.getRightPage();
-				parentNode.releaseWriteLatch();
-				incrementWriteLatchesReleased();
-				bufferCache.unpin(parentNode);
-				incrementUnpins();
+    public void updateParentForDelete(RTreeOpContext ctx) throws HyracksDataException {
+        int parentId = ctx.pathList.getLastPageId();
+        ICachedPage parentNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, parentId), false);
+        incrementPins();
+        parentNode.acquireWriteLatch();
+        incrementWriteLatchesAcquired();
+        ctx.interiorFrame.setPage(parentNode);
+        boolean foundParent = true;
+        int tupleIndex = -1;
 
-				if (rightPage == -1) {
-					break;
-				}
+        if (ctx.interiorFrame.getPageLsn() != ctx.pathList.getLastPageLsn()) {
+            foundParent = false;
+            while (true) {
+                tupleIndex = ctx.interiorFrame.findTupleByPointer(ctx.splitKey.getLeftTuple(), cmp);
+                if (tupleIndex != -1) {
+                    // found the parent
+                    foundParent = true;
+                    break;
+                }
+                int rightPage = ctx.interiorFrame.getRightPage();
+                parentNode.releaseWriteLatch();
+                incrementWriteLatchesReleased();
+                bufferCache.unpin(parentNode);
+                incrementUnpins();
 
-				parentId = rightPage;
-				parentNode = bufferCache.pin(
-						BufferedFileHandle.getDiskPageId(fileId, parentId),
-						false);
-				incrementPins();
-				parentNode.acquireWriteLatch();
-				incrementWriteLatchesAcquired();
-				ctx.interiorFrame.setPage(parentNode);
-			}
-		}
-		if (foundParent) {
-			if (tupleIndex == -1) {
-				tupleIndex = ctx.interiorFrame.findTupleByPointer(
-						ctx.splitKey.getLeftTuple(), cmp);
-			}
-			boolean recomputeMBR = ctx.interiorFrame.recomputeMBR(
-					ctx.splitKey.getLeftTuple(), tupleIndex, cmp);
+                if (rightPage == -1) {
+                    break;
+                }
 
-			if (recomputeMBR) {
-				ctx.interiorFrame.adjustKey(ctx.splitKey.getLeftTuple(),
-						tupleIndex, cmp);
-				ctx.pathList.moveLast();
+                parentId = rightPage;
+                parentNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, parentId), false);
+                incrementPins();
+                parentNode.acquireWriteLatch();
+                incrementWriteLatchesAcquired();
+                ctx.interiorFrame.setPage(parentNode);
+            }
+        }
+        if (foundParent) {
+            if (tupleIndex == -1) {
+                tupleIndex = ctx.interiorFrame.findTupleByPointer(ctx.splitKey.getLeftTuple(), cmp);
+            }
+            boolean recomputeMBR = ctx.interiorFrame.recomputeMBR(ctx.splitKey.getLeftTuple(), tupleIndex, cmp);
 
-				incrementGlobalNsn();
-				ctx.interiorFrame.setPageLsn(getGlobalNsn());
+            if (recomputeMBR) {
+                ctx.interiorFrame.adjustKey(ctx.splitKey.getLeftTuple(), tupleIndex, cmp);
+                ctx.pathList.moveLast();
 
-				ctx.splitKey.reset();
-				if (!ctx.pathList.isEmpty()) {
-					ctx.interiorFrame.computeMBR(ctx.splitKey, cmp);
-					ctx.splitKey.setLeftPage(parentId);
-				}
-			} else {
-				ctx.pathList.moveLast();
-				ctx.splitKey.reset();
-			}
+                incrementGlobalNsn();
+                ctx.interiorFrame.setPageLsn(getGlobalNsn());
 
-			parentNode.releaseWriteLatch();
-			incrementWriteLatchesReleased();
-			bufferCache.unpin(parentNode);
-			incrementUnpins();
-			return;
-		}
+                ctx.splitKey.reset();
+                if (!ctx.pathList.isEmpty()) {
+                    ctx.interiorFrame.computeMBR(ctx.splitKey);
+                    ctx.splitKey.setLeftPage(parentId);
+                }
+            } else {
+                ctx.pathList.moveLast();
+                ctx.splitKey.reset();
+            }
 
-		// very rare situation when the there is a root split, do an exhaustive
-		// breadth-first traversal looking for the parent tuple
+            parentNode.releaseWriteLatch();
+            incrementWriteLatchesReleased();
+            bufferCache.unpin(parentNode);
+            incrementUnpins();
+            return;
+        }
 
-		ctx.pathList.clear();
-		ctx.traverseList.clear();
-		findPath(ctx);
-		updateParentForDelete(ctx);
-	}
+        // Very rare situation when there is a root split; do an exhaustive
+        // breadth-first traversal looking for the parent tuple.
 
-	public int findTupleToDelete(RTreeOpContext ctx) throws Exception {
+        ctx.pathList.clear();
+        ctx.traverseList.clear();
+        findPath(ctx);
+        updateParentForDelete(ctx);
+    }
 
-		ctx.traverseList.add(rootPage, -1, -1);
-		ctx.pathList.add(rootPage, -1, ctx.traverseList.size() - 1);
+    public int findTupleToDelete(RTreeOpContext ctx) throws HyracksDataException {
 
-		while (!ctx.pathList.isEmpty()) {
-			int pageId = ctx.pathList.getLastPageId();
-			int parentLsn = ctx.pathList.getLastPageLsn();
-			int pageIndex = ctx.pathList.getLastPageIndex();
-			ctx.pathList.moveLast();
-			ICachedPage node = bufferCache.pin(
-					BufferedFileHandle.getDiskPageId(fileId, pageId), false);
-			incrementPins();
-			node.acquireReadLatch();
-			incrementReadLatchesAcquired();
-			ctx.interiorFrame.setPage(node);
-			boolean isLeaf = ctx.interiorFrame.isLeaf();
-			int pageLsn = ctx.interiorFrame.getPageLsn();
-			int parentIndex = ctx.traverseList.getPageIndex(pageIndex);
-			ctx.traverseList.setPageLsn(pageIndex, pageLsn);
+        ctx.traverseList.add(rootPage, -1, -1);
+        ctx.pathList.add(rootPage, -1, ctx.traverseList.size() - 1);
 
-			if (pageId != rootPage
-					&& parentLsn < ctx.interiorFrame.getPageNsn()) {
-				// Concurrent split detected, we need to visit the right page
-				int rightPage = ctx.interiorFrame.getRightPage();
-				if (rightPage != -1) {
-					ctx.traverseList.add(rightPage, -1, parentIndex);
-					ctx.pathList.add(rightPage, parentLsn,
-							ctx.traverseList.size() - 1);
-				}
-			}
+        while (!ctx.pathList.isEmpty()) {
+            int pageId = ctx.pathList.getLastPageId();
+            long parentLsn = ctx.pathList.getLastPageLsn();
+            int pageIndex = ctx.pathList.getLastPageIndex();
+            ctx.pathList.moveLast();
+            ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+            incrementPins();
+            node.acquireReadLatch();
+            incrementReadLatchesAcquired();
+            ctx.interiorFrame.setPage(node);
+            boolean isLeaf = ctx.interiorFrame.isLeaf();
+            long pageLsn = ctx.interiorFrame.getPageLsn();
+            int parentIndex = ctx.traverseList.getPageIndex(pageIndex);
+            ctx.traverseList.setPageLsn(pageIndex, pageLsn);
 
-			if (!isLeaf) {
-				for (int i = 0; i < ctx.interiorFrame.getTupleCount(); i++) {
-					int childPageId = ctx.interiorFrame
-							.getChildPageIdIfIntersect(ctx.tuple, i, cmp);
-					if (childPageId != -1) {
-						ctx.traverseList.add(childPageId, -1, pageIndex);
-						ctx.pathList.add(childPageId, pageLsn,
-								ctx.traverseList.size() - 1);
-					}
-				}
-			} else {
-				ctx.leafFrame.setPage(node);
-				int tupleIndex = ctx.leafFrame.findTupleIndex(ctx.tuple, cmp);
-				if (tupleIndex != -1) {
+            if (pageId != rootPage && parentLsn < ctx.interiorFrame.getPageNsn()) {
+                // Concurrent split detected; we need to visit the right page.
+                int rightPage = ctx.interiorFrame.getRightPage();
+                if (rightPage != -1) {
+                    ctx.traverseList.add(rightPage, -1, parentIndex);
+                    ctx.pathList.add(rightPage, parentLsn, ctx.traverseList.size() - 1);
+                }
+            }
 
-					node.releaseReadLatch();
-					incrementReadLatchesReleased();
-					bufferCache.unpin(node);
-					incrementUnpins();
+            if (!isLeaf) {
+                for (int i = 0; i < ctx.interiorFrame.getTupleCount(); i++) {
+                    int childPageId = ctx.interiorFrame.getChildPageIdIfIntersect(ctx.tuple, i, cmp);
+                    if (childPageId != -1) {
+                        ctx.traverseList.add(childPageId, -1, pageIndex);
+                        ctx.pathList.add(childPageId, pageLsn, ctx.traverseList.size() - 1);
+                    }
+                }
+            } else {
+                ctx.leafFrame.setPage(node);
+                int tupleIndex = ctx.leafFrame.findTupleIndex(ctx.tuple, cmp);
+                if (tupleIndex != -1) {
 
-					node = bufferCache.pin(
-							BufferedFileHandle.getDiskPageId(fileId, pageId),
-							false);
-					incrementPins();
-					node.acquireWriteLatch();
-					incrementWriteLatchesAcquired();
-					ctx.leafFrame.setPage(node);
+                    node.releaseReadLatch();
+                    incrementReadLatchesReleased();
+                    bufferCache.unpin(node);
+                    incrementUnpins();
 
-					if (ctx.leafFrame.getPageLsn() != pageLsn) {
-						// The page was changed while we unlocked it
+                    node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+                    incrementPins();
+                    node.acquireWriteLatch();
+                    incrementWriteLatchesAcquired();
+                    ctx.leafFrame.setPage(node);
 
-						tupleIndex = ctx.leafFrame.findTupleIndex(ctx.tuple,
-								cmp);
-						if (tupleIndex == -1) {
-							ctx.traverseList.add(pageId, -1, parentIndex);
-							ctx.pathList.add(pageId, parentLsn,
-									ctx.traverseList.size() - 1);
+                    if (ctx.leafFrame.getPageLsn() != pageLsn) {
+                        // The page was changed while it was unlatched; re-check for the tuple.
 
-							node.releaseWriteLatch();
-							incrementWriteLatchesReleased();
-							bufferCache.unpin(node);
-							incrementUnpins();
-							continue;
-						} else {
-							ctx.pathList.clear();
-							fillPath(ctx, pageIndex);
-							return tupleIndex;
-						}
-					} else {
-						ctx.pathList.clear();
-						fillPath(ctx, pageIndex);
-						return tupleIndex;
-					}
-				}
-			}
-			node.releaseReadLatch();
-			incrementReadLatchesReleased();
-			bufferCache.unpin(node);
-			incrementUnpins();
-		}
-		return -1;
-	}
+                        tupleIndex = ctx.leafFrame.findTupleIndex(ctx.tuple, cmp);
+                        if (tupleIndex == -1) {
+                            ctx.traverseList.add(pageId, -1, parentIndex);
+                            ctx.pathList.add(pageId, parentLsn, ctx.traverseList.size() - 1);
 
-	public void deleteTuple(int pageId, int tupleIndex, RTreeOpContext ctx)
-			throws Exception {
-		ctx.leafFrame.delete(tupleIndex, cmp);
-		incrementGlobalNsn();
-		ctx.leafFrame.setPageLsn(getGlobalNsn());
+                            node.releaseWriteLatch();
+                            incrementWriteLatchesReleased();
+                            bufferCache.unpin(node);
+                            incrementUnpins();
+                            continue;
+                        } else {
+                            ctx.pathList.clear();
+                            fillPath(ctx, pageIndex);
+                            return tupleIndex;
+                        }
+                    } else {
+                        ctx.pathList.clear();
+                        fillPath(ctx, pageIndex);
+                        return tupleIndex;
+                    }
+                }
+            }
+            node.releaseReadLatch();
+            incrementReadLatchesReleased();
+            bufferCache.unpin(node);
+            incrementUnpins();
+        }
+        return -1;
+    }
 
-		// if the page is empty, just leave it there for future inserts
-		if (pageId != rootPage && ctx.leafFrame.getTupleCount() > 0) {
-			ctx.leafFrame.computeMBR(ctx.splitKey, cmp);
-			ctx.splitKey.setLeftPage(pageId);
-		}
-	}
+    public void deleteTuple(int pageId, int tupleIndex, RTreeOpContext ctx) throws HyracksDataException {
+        ctx.leafFrame.delete(tupleIndex, cmp);
+        incrementGlobalNsn();
+        ctx.leafFrame.setPageLsn(getGlobalNsn());
 
-	public void search(ITreeIndexCursor cursor, SearchPredicate pred,
-			RTreeOpContext ctx) throws Exception {
-		ctx.reset();
-		ctx.cursor = cursor;
+        // if the page is empty, just leave it there for future inserts
+        if (pageId != rootPage && ctx.leafFrame.getTupleCount() > 0) {
+            ctx.leafFrame.computeMBR(ctx.splitKey);
+            ctx.splitKey.setLeftPage(pageId);
+        }
+    }
 
-		cursor.setBufferCache(bufferCache);
-		cursor.setFileId(fileId);
-		ctx.cursorInitialState.setRootPage(rootPage);
-		ctx.cursor.open(ctx.cursorInitialState, pred);
-	}
+    public void search(ITreeIndexCursor cursor, SearchPredicate pred, RTreeOpContext ctx) throws Exception {
+        ctx.reset();
+        ctx.cursor = cursor;
 
-	public ITreeIndexFrameFactory getInteriorFrameFactory() {
-		return interiorFrameFactory;
-	}
+        cursor.setBufferCache(bufferCache);
+        cursor.setFileId(fileId);
+        ctx.cursorInitialState.setRootPage(rootPage);
+        ctx.cursor.open(ctx.cursorInitialState, pred);
+    }
 
-	public ITreeIndexFrameFactory getLeafFrameFactory() {
-		return leafFrameFactory;
-	}
+    public ITreeIndexFrameFactory getInteriorFrameFactory() {
+        return interiorFrameFactory;
+    }
 
-	public MultiComparator getCmp() {
-		return cmp;
-	}
+    public ITreeIndexFrameFactory getLeafFrameFactory() {
+        return leafFrameFactory;
+    }
 
-	public IFreePageManager getFreePageManager() {
-		return freePageManager;
-	}
+    public MultiComparator getCmp() {
+        return cmp;
+    }
 
-	@Override
-	public void update(ITupleReference tuple, IndexOpContext ictx)
-			throws Exception {
-		throw new Exception("RTree Update not implemented.");
-	}
+    public IFreePageManager getFreePageManager() {
+        return freePageManager;
+    }
 
-	public final class BulkLoadContext implements IIndexBulkLoadContext {
+    @Override
+    public void update(ITupleReference tuple, IIndexOpContext ictx) {
+        throw new UnsupportedOperationException("RTree Update not implemented.");
+    }
 
-		public RTreeOpContext insertOpCtx;
+    public final class BulkLoadContext implements IIndexBulkLoadContext {
 
-		public BulkLoadContext(float fillFactor, IRTreeFrame leafFrame,
-				IRTreeFrame interiorFrame, ITreeIndexMetaDataFrame metaFrame)
-				throws HyracksDataException {
+        public RTreeOpContext insertOpCtx;
 
-			insertOpCtx = createOpContext(IndexOp.INSERT, leafFrame,
-					interiorFrame, metaFrame);
-		}
-	}
+        public BulkLoadContext(float fillFactor, IRTreeFrame leafFrame, IRTreeFrame interiorFrame,
+                ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
+            insertOpCtx = createOpContext(IndexOp.INSERT);
+        }
+    }
 
-	@Override
-	public IIndexBulkLoadContext beginBulkLoad(float fillFactor,
-			ITreeIndexFrame leafFrame, ITreeIndexFrame interiorFrame,
-			ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
-		if (loaded)
-			throw new HyracksDataException(
-					"Trying to bulk-load RTree but RTree has already been loaded.");
+    @Override
+    public IIndexBulkLoadContext beginBulkLoad(float fillFactor) throws HyracksDataException {
+        if (loaded) {
+            throw new HyracksDataException("Trying to bulk-load RTree but RTree has already been loaded.");
+        }
 
-		BulkLoadContext ctx = new BulkLoadContext(fillFactor,
-				(IRTreeFrame) leafFrame, (IRTreeFrame) interiorFrame, metaFrame);
-		return ctx;
-	}
+        BulkLoadContext ctx = new BulkLoadContext(fillFactor, (IRTreeFrame) leafFrameFactory.createFrame(),
+                (IRTreeFrame) interiorFrameFactory.createFrame(),
+                freePageManager.getMetaDataFrameFactory().createFrame());
+        return ctx;
+    }
 
-	@Override
-	public void bulkLoadAddTuple(IIndexBulkLoadContext ictx,
-			ITupleReference tuple) throws HyracksDataException {
-		try {
-			insert(tuple, ((BulkLoadContext) ictx).insertOpCtx);
-		} catch (Exception e) {
-			throw new HyracksDataException("BulkLoad Error");
-		}
-	}
+    @Override
+    public void bulkLoadAddTuple(ITupleReference tuple, IIndexBulkLoadContext ictx) throws HyracksDataException {
+        try {
+            insert(tuple, ((BulkLoadContext) ictx).insertOpCtx);
+        } catch (Exception e) {
+            throw new HyracksDataException("BulkLoad Error");
+        }
+    }
 
-	@Override
-	public void endBulkLoad(IIndexBulkLoadContext ictx)
-			throws HyracksDataException {
-		loaded = true;
-	}
+    @Override
+    public void endBulkLoad(IIndexBulkLoadContext ictx) throws HyracksDataException {
+        loaded = true;
+    }
 
-	@Override
-	public void diskOrderScan(ITreeIndexCursor icursor,
-			ITreeIndexFrame leafFrame, ITreeIndexMetaDataFrame metaFrame,
-			IndexOpContext ictx) throws HyracksDataException {
-		TreeDiskOrderScanCursor cursor = (TreeDiskOrderScanCursor) icursor;
-		RTreeOpContext ctx = (RTreeOpContext) ictx;
-		ctx.reset();
+    @Override
+    public void diskOrderScan(ITreeIndexCursor icursor, IIndexOpContext ictx) throws HyracksDataException {
+        TreeDiskOrderScanCursor cursor = (TreeDiskOrderScanCursor) icursor;
+        RTreeOpContext ctx = (RTreeOpContext) ictx;
+        ctx.reset();
 
-		int currentPageId = rootPage + 1;
-		int maxPageId = freePageManager.getMaxPage(metaFrame);
+        int currentPageId = rootPage + 1;
+        int maxPageId = freePageManager.getMaxPage(ctx.metaFrame);
 
-		ICachedPage page = bufferCache.pin(
-				BufferedFileHandle.getDiskPageId(fileId, currentPageId), false);
-		page.acquireReadLatch();
-		cursor.setBufferCache(bufferCache);
-		cursor.setFileId(fileId);
-		cursor.setCurrentPageId(currentPageId);
-		cursor.setMaxPageId(maxPageId);
-		ctx.cursorInitialState.setPage(page);
-		cursor.open(ctx.cursorInitialState, diskOrderScanPredicate);
-	}
+        ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, currentPageId), false);
+        page.acquireReadLatch();
+        cursor.setBufferCache(bufferCache);
+        cursor.setFileId(fileId);
+        cursor.setCurrentPageId(currentPageId);
+        cursor.setMaxPageId(maxPageId);
+        ctx.cursorInitialState.setPage(page);
+        cursor.open(ctx.cursorInitialState, diskOrderScanPredicate);
+    }
 
-	@Override
-	public int getRootPageId() {
-		return rootPage;
-	}
+    @Override
+    public int getRootPageId() {
+        return rootPage;
+    }
 
-	@Override
-	public int getFieldCount() {
-		return cmp.getFieldCount();
-	}
+    @Override
+    public int getFieldCount() {
+        return fieldCount;
+    }
 
-	@Override
-	public IndexType getIndexType() {
-		return IndexType.RTREE;
-	}
-}
\ No newline at end of file
+    @Override
+    public IndexType getIndexType() {
+        return IndexType.RTREE;
+    }
+}
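For reference, the reworked bulk-load API above (beginBulkLoad/bulkLoadAddTuple/endBulkLoad) now creates the leaf, interior, and metadata frames internally, so a caller only drives a three-call protocol. The following is a minimal sketch, not part of the patch: the RTree instance and the tuple source are assumed to be supplied by the surrounding operator or test code, and the import paths follow the package layout used elsewhere in this change.

    import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
    import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
    import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
    import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;

    public class RTreeBulkLoadSketch {
        // 'rtree' must already be created and opened; 'tuples' is any source of ITupleReference.
        public static void load(RTree rtree, Iterable<ITupleReference> tuples) throws HyracksDataException {
            IIndexBulkLoadContext bulkLoadCtx = rtree.beginBulkLoad(0.7f); // 0.7f is an arbitrary fill factor
            for (ITupleReference tuple : tuples) {
                // Each call delegates to insert() through the BulkLoadContext's insertOpCtx.
                rtree.bulkLoadAddTuple(tuple, bulkLoadCtx);
            }
            rtree.endBulkLoad(bulkLoadCtx);
        }
    }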
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeOpContext.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeOpContext.java
index fc57019..dc4a753 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeOpContext.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeOpContext.java
@@ -16,31 +16,30 @@
 package edu.uci.ics.hyracks.storage.am.rtree.impls;
 
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexOpContext;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOpContext;
 import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
 import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
 
-public final class RTreeOpContext implements IndexOpContext {
-	public final IndexOp op;
+public final class RTreeOpContext implements IIndexOpContext {
 	public final IRTreeInteriorFrame interiorFrame;
 	public final IRTreeLeafFrame leafFrame;
+	public IndexOp op;
 	public ITreeIndexCursor cursor;
 	public RTreeCursorInitialState cursorInitialState;
-	public final ITreeIndexMetaDataFrame metaFrame;
-	public final RTreeSplitKey splitKey;
+	public ITreeIndexMetaDataFrame metaFrame;
+	public RTreeSplitKey splitKey;
 	public ITupleReference tuple;
-	public final PathList pathList; // used to record the pageIds and pageLsns
-									// of the visited pages
-	public final PathList traverseList; // used for traversing the tree
+	public PathList pathList; // used to record the pageIds and pageLsns
+								// of the visited pages
+	public PathList traverseList; // used for traversing the tree
 	private static final int initTraverseListSize = 100;
 
 	public RTreeOpContext(IndexOp op, IRTreeLeafFrame leafFrame,
 			IRTreeInteriorFrame interiorFrame,
 			ITreeIndexMetaDataFrame metaFrame, int treeHeightHint) {
-		this.op = op;
 		this.interiorFrame = interiorFrame;
 		this.leafFrame = leafFrame;
 		this.metaFrame = metaFrame;
@@ -74,4 +73,25 @@
 			traverseList.clear();
 		}
 	}
+
+	@Override
+	public void reset(IndexOp newOp) {
+		if (op != IndexOp.SEARCH && op != IndexOp.DISKORDERSCAN) {
+			if (splitKey == null) {
+				splitKey = new RTreeSplitKey(interiorFrame.getTupleWriter()
+						.createTupleReference(), interiorFrame.getTupleWriter()
+						.createTupleReference());
+			}
+			if (traverseList == null) {
+				traverseList = new PathList(initTraverseListSize,
+						initTraverseListSize);
+			}
+
+		} else {
+			if (cursorInitialState == null) {
+				cursorInitialState = new RTreeCursorInitialState(pathList, 1);
+			}
+		}
+		this.op = newOp;
+	}
 }
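The reset(IndexOp) override added above lets one RTreeOpContext be reused across operations, allocating the split key and traverse list lazily. A hedged sketch of a call site follows; the frames are assumed to come from the tree's frame factories, and the tree-height hint of 8 is an arbitrary illustrative value.

    import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
    import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
    import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
    import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
    import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeOpContext;

    public class RTreeOpContextSketch {
        // Frames are assumed to come from the tree's frame factories; they are not created here.
        public static RTreeOpContext makeInsertContext(IRTreeLeafFrame leafFrame, IRTreeInteriorFrame interiorFrame,
                ITreeIndexMetaDataFrame metaFrame) {
            RTreeOpContext ctx = new RTreeOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame, 8);
            ctx.reset(IndexOp.INSERT); // lazily allocates the split key and traverse list if still missing
            return ctx;
        }
    }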
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSearchCursor.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSearchCursor.java
index 9d37d86..a138212 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSearchCursor.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSearchCursor.java
@@ -90,7 +90,7 @@
 		}
 		while (!pathList.isEmpty()) {
 			int pageId = pathList.getLastPageId();
-			int parentLsn = pathList.getLastPageLsn();
+			long parentLsn = pathList.getLastPageLsn();
 			pathList.moveLast();
 			ICachedPage node = bufferCache.pin(
 					BufferedFileHandle.getDiskPageId(fileId, pageId), false);
@@ -99,7 +99,7 @@
 			readLatched = true;
 			interiorFrame.setPage(node);
 			boolean isLeaf = interiorFrame.isLeaf();
-			int pageLsn = interiorFrame.getPageLsn();
+			long pageLsn = interiorFrame.getPageLsn();
 
 			if (pageId != rootPage && parentLsn < interiorFrame.getPageNsn()) {
 				// Concurrent split detected, we need to visit the right page
@@ -192,7 +192,6 @@
 		}
 
 		pathList.add(this.rootPage, -1, -1);
-		frameTuple.setFieldCount(cmp.getFieldCount());
 		tupleIndex = 0;
 		fetchNextLeafPage();
 	}
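The cursor's LSN bookkeeping above is widened from int to long, matching the long getPageLsn()/getPageNsn() values used in RTree.java earlier in this patch; with 32-bit LSNs a long-lived index could wrap around and silently defeat the parentLsn < getPageNsn() concurrent-split check. A tiny self-contained illustration of that wrap-around hazard (not part of the patch):

    public class LsnWidthDemo {
        public static void main(String[] args) {
            int intLsn = Integer.MAX_VALUE;
            intLsn++;                                      // wraps around to Integer.MIN_VALUE
            long longLsn = (long) Integer.MAX_VALUE + 1L;  // keeps increasing monotonically
            System.out.println(intLsn);                    // -2147483648: int ordering is broken
            System.out.println(longLsn);                   // 2147483648: long ordering still holds
        }
    }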
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/Rectangle.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/Rectangle.java
index 019760b..cb9b160 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/Rectangle.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/Rectangle.java
@@ -16,7 +16,7 @@
 package edu.uci.ics.hyracks.storage.am.rtree.impls;
 
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
 
 public class Rectangle {
 	private int dim;
@@ -49,26 +49,26 @@
 		high[i] = value;
 	}
 
-	public void set(ITupleReference tuple, MultiComparator cmp) {
+	public void set(ITupleReference tuple, IPrimitiveValueProvider[] valueProviders) {
 		for (int i = 0; i < getDim(); i++) {
 			int j = i + getDim();
-			setLow(i, cmp.getValueProviders()[i].getValue(
+			setLow(i, valueProviders[i].getValue(
 					tuple.getFieldData(i), tuple.getFieldStart(i)));
-			setHigh(i, cmp.getValueProviders()[j].getValue(
+			setHigh(i, valueProviders[j].getValue(
 					tuple.getFieldData(j), tuple.getFieldStart(j)));
 		}
 	}
 
-	public void enlarge(ITupleReference tupleToBeInserted, MultiComparator cmp) {
+	public void enlarge(ITupleReference tupleToBeInserted, IPrimitiveValueProvider[] valueProviders) {
 		for (int i = 0; i < getDim(); i++) {
 			int j = getDim() + i;
-			double low = cmp.getValueProviders()[i].getValue(
+			double low = valueProviders[i].getValue(
 					tupleToBeInserted.getFieldData(i),
 					tupleToBeInserted.getFieldStart(i));
 			if (getLow(i) > low) {
 				setLow(i, low);
 			}
-			double high = cmp.getValueProviders()[j].getValue(
+			double high = valueProviders[j].getValue(
 					tupleToBeInserted.getFieldData(j),
 					tupleToBeInserted.getFieldStart(j));
 			if (getHigh(i) < high) {
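Rectangle.set and Rectangle.enlarge now take the IPrimitiveValueProvider array directly instead of pulling it out of a MultiComparator. A hedged sketch of the new call pattern follows; the dimension-based Rectangle constructor and the provider array are assumptions for illustration, not guaranteed by this hunk.

    import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
    import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
    import edu.uci.ics.hyracks.storage.am.rtree.impls.Rectangle;

    public class RectangleSketch {
        // valueProviders would typically come from RTreeUtils.comparatorsToPrimitiveValueProviders(...).
        public static Rectangle mbrOf(ITupleReference tuple, IPrimitiveValueProvider[] valueProviders) {
            Rectangle rect = new Rectangle(2); // assumes the existing dimension-based constructor
            rect.set(tuple, valueProviders);   // low/high coordinates are read through the providers
            return rect;
        }
    }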
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/UnorderedSlotManager.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/UnorderedSlotManager.java
index 7badb8e..260cdea 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/UnorderedSlotManager.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/UnorderedSlotManager.java
@@ -24,6 +24,7 @@
 import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMFrame;
 
 public class UnorderedSlotManager extends AbstractSlotManager {
+
 	@Override
 	public int findTupleIndex(ITupleReference searchKey,
 			ITreeIndexTupleReference frameTuple, MultiComparator multiCmp,
@@ -58,7 +59,7 @@
 					break;
 				}
 			}
-			int remainingFieldCount = multiCmp.getFieldCount()
+			int remainingFieldCount = frameTuple.getFieldCount()
 					- multiCmp.getKeyFieldCount();
 			for (int j = multiCmp.getKeyFieldCount(); j < multiCmp
 					.getKeyFieldCount() + remainingFieldCount; j++) {
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriter.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriter.java
index 6acac7e..244c69e 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriter.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriter.java
@@ -23,36 +23,35 @@
 
 public class RTreeTypeAwareTupleWriter extends TypeAwareTupleWriter {
 
-	public RTreeTypeAwareTupleWriter(ITypeTrait[] typeTraits) {
-		super(typeTraits);
-	}
+    public RTreeTypeAwareTupleWriter(ITypeTrait[] typeTraits) {
+        super(typeTraits);
+    }
 
-	public int writeTupleFields(ITreeIndexTupleReference[] refs,
-			int startField, ByteBuffer targetBuf, int targetOff) {
-		int runner = targetOff;
-		int nullFlagsBytes = getNullFlagsBytes(refs.length);
-		// write null indicator bits
-		for (int i = 0; i < nullFlagsBytes; i++) {
-			targetBuf.put(runner++, (byte) 0);
-		}
+    public int writeTupleFields(ITreeIndexTupleReference[] refs, int startField, ByteBuffer targetBuf, int targetOff) {
+        int runner = targetOff;
+        int nullFlagsBytes = getNullFlagsBytes(refs.length);
+        // write null indicator bits
+        for (int i = 0; i < nullFlagsBytes; i++) {
+            targetBuf.put(runner++, (byte) 0);
+        }
 
-		// write field slots for variable length fields
-		// since the r-tree has fixed length keys, we don't actually need this?
-		encDec.reset(targetBuf.array(), runner);
-		for (int i = startField; i < startField + refs.length; i++) {
-			if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
-				encDec.encode(refs[i].getFieldLength(i));
-			}
-		}
-		runner = encDec.getPos();
+        // write field slots for variable length fields
+        // since the r-tree has fixed length keys, we don't actually need this?
+        encDec.reset(targetBuf.array(), runner);
+        for (int i = startField; i < startField + refs.length; i++) {
+            if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
+                encDec.encode(refs[i].getFieldLength(i));
+            }
+        }
+        runner = encDec.getPos();
 
-		// write data
-		for (int i = 0; i < refs.length; i++) {
-			System.arraycopy(refs[i].getFieldData(i), refs[i].getFieldStart(i),
-					targetBuf.array(), runner, refs[i].getFieldLength(i));
-			runner += refs[i].getFieldLength(i);
-		}
-		return runner - targetOff;
+        // write data
+        for (int i = 0; i < refs.length; i++) {
+            System.arraycopy(refs[i].getFieldData(i), refs[i].getFieldStart(i), targetBuf.array(), runner,
+                    refs[i].getFieldLength(i));
+            runner += refs[i].getFieldLength(i);
+        }
+        return runner - targetOff;
 
-	}
+    }
 }
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriterFactory.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriterFactory.java
index 7d88f2e..a27d8cd 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriterFactory.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriterFactory.java
@@ -19,19 +19,17 @@
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
 
-public class RTreeTypeAwareTupleWriterFactory implements
-		ITreeIndexTupleWriterFactory {
+public class RTreeTypeAwareTupleWriterFactory implements ITreeIndexTupleWriterFactory {
 
-	private static final long serialVersionUID = 1L;
-	private ITypeTrait[] typeTraits;
+    private static final long serialVersionUID = 1L;
+    private ITypeTrait[] typeTraits;
 
-	public RTreeTypeAwareTupleWriterFactory(ITypeTrait[] typeTraits) {
-		this.typeTraits = typeTraits;
-	}
+    public RTreeTypeAwareTupleWriterFactory(ITypeTrait[] typeTraits) {
+        this.typeTraits = typeTraits;
+    }
 
-	@Override
-	public ITreeIndexTupleWriter createTupleWriter() {
-		return new RTreeTypeAwareTupleWriter(typeTraits);
-	}
-
+    @Override
+    public ITreeIndexTupleWriter createTupleWriter() {
+        return new RTreeTypeAwareTupleWriter(typeTraits);
+    }
 }
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/util/RTreeUtils.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/util/RTreeUtils.java
new file mode 100644
index 0000000..b312116
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/util/RTreeUtils.java
@@ -0,0 +1,102 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.util;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.DoubleBinaryComparator;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.DoubleBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.FloatBinaryComparator;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.FloatBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparator;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.DoublePrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.FloatPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.IntegerPrimitiveValueProviderFactory;
+
+public class RTreeUtils {
+    public static IPrimitiveValueProvider comparatorToPrimitiveValueProvider(IBinaryComparator cmp) {
+        if (cmp instanceof IntegerBinaryComparator) {
+            return IntegerPrimitiveValueProviderFactory.INSTANCE.createPrimitiveValueProvider();
+        }
+        if (cmp instanceof FloatBinaryComparator) {
+            return FloatPrimitiveValueProviderFactory.INSTANCE.createPrimitiveValueProvider();
+        }
+        if (cmp instanceof DoubleBinaryComparator) {
+            return DoublePrimitiveValueProviderFactory.INSTANCE.createPrimitiveValueProvider();
+        }
+        throw new UnsupportedOperationException(
+                "Converting binary comparator to primitive value provider not implemented for: " + cmp.toString());
+    }
+
+    public static IPrimitiveValueProvider[] comparatorsToPrimitiveValueProviders(IBinaryComparator[] cmps) {
+        IPrimitiveValueProvider[] primitiveValueProviders = new IPrimitiveValueProvider[cmps.length];
+        for (int i = 0; i < cmps.length; i++) {
+            primitiveValueProviders[i] = comparatorToPrimitiveValueProvider(cmps[i]);
+        }
+        return primitiveValueProviders;
+    }
+
+    public static IPrimitiveValueProviderFactory comparatorToPrimitiveValueProviderFactory(IBinaryComparator cmp) {
+        if (cmp instanceof IntegerBinaryComparator) {
+            return IntegerPrimitiveValueProviderFactory.INSTANCE;
+        }
+        if (cmp instanceof FloatBinaryComparator) {
+            return FloatPrimitiveValueProviderFactory.INSTANCE;
+        }
+        if (cmp instanceof DoubleBinaryComparator) {
+            return DoublePrimitiveValueProviderFactory.INSTANCE;
+        }
+        throw new UnsupportedOperationException(
+                "Converting binary comparator to primitive value provider factory not implemented for: "
+                        + cmp.toString());
+    }
+
+    public static IPrimitiveValueProviderFactory[] comparatorsToPrimitiveValueProviderFactories(IBinaryComparator[] cmps) {
+        IPrimitiveValueProviderFactory[] primitiveValueProviders = new IPrimitiveValueProviderFactory[cmps.length];
+        for (int i = 0; i < cmps.length; i++) {
+            primitiveValueProviders[i] = comparatorToPrimitiveValueProviderFactory(cmps[i]);
+        }
+        return primitiveValueProviders;
+    }
+
+    public static IPrimitiveValueProviderFactory comparatorFactoryToPrimitiveValueProviderFactory(
+            IBinaryComparatorFactory cmpFactory) {
+        if (cmpFactory instanceof IntegerBinaryComparatorFactory) {
+            return IntegerPrimitiveValueProviderFactory.INSTANCE;
+        }
+        if (cmpFactory instanceof FloatBinaryComparatorFactory) {
+            return FloatPrimitiveValueProviderFactory.INSTANCE;
+        }
+        if (cmpFactory instanceof DoubleBinaryComparatorFactory) {
+            return DoublePrimitiveValueProviderFactory.INSTANCE;
+        }
+        throw new UnsupportedOperationException(
+                "Converting binary comparator factory to primitive value provider factory not implemented for: "
+                        + cmpFactory.toString());
+    }
+
+    public static IPrimitiveValueProviderFactory[] comparatorFactoriesToPrimitiveValueProviderFactories(
+            IBinaryComparatorFactory[] cmpFactories) {
+        IPrimitiveValueProviderFactory[] primitiveValueProviders = new IPrimitiveValueProviderFactory[cmpFactories.length];
+        for (int i = 0; i < cmpFactories.length; i++) {
+            primitiveValueProviders[i] = comparatorFactoryToPrimitiveValueProviderFactory(cmpFactories[i]);
+        }
+        return primitiveValueProviders;
+    }
+}
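A hedged example of how the new RTreeUtils helpers are meant to be used when wiring up an R-tree over integer coordinates; it assumes IntegerBinaryComparatorFactory.INSTANCE produces IntegerBinaryComparator instances, which is the same assumption RTreeUtils itself makes above.

    import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
    import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
    import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
    import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;

    public class RTreeUtilsSketch {
        public static IPrimitiveValueProvider[] providersForIntKeys(int keyFieldCount) {
            // Build one integer comparator per key field (e.g. xlo, ylo, xhi, yhi for a 2-D key).
            IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
            for (int i = 0; i < cmps.length; i++) {
                cmps[i] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
            }
            // RTreeUtils maps each IntegerBinaryComparator to an integer primitive value provider.
            return RTreeUtils.comparatorsToPrimitiveValueProviders(cmps);
        }
    }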
diff --git a/hyracks-storage-common/.classpath b/hyracks-storage-common/.classpath
deleted file mode 100644
index 1f3c1ff..0000000
--- a/hyracks-storage-common/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/classes" path="src/main/java"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-storage-common/.project b/hyracks-storage-common/.project
deleted file mode 100644
index d990298..0000000
--- a/hyracks-storage-common/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>hyracks-storage-common</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-storage-common/.settings/org.eclipse.jdt.core.prefs b/hyracks-storage-common/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644
index 450f5c4..0000000
--- a/hyracks-storage-common/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,6 +0,0 @@
-#Fri May 20 19:34:04 PDT 2011
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
-org.eclipse.jdt.core.compiler.compliance=1.6
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.6
diff --git a/hyracks-storage-common/.settings/org.maven.ide.eclipse.prefs b/hyracks-storage-common/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index a91dbc3..0000000
--- a/hyracks-storage-common/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Tue Aug 24 14:59:44 PDT 2010
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/DebugBufferCache.java b/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/DebugBufferCache.java
new file mode 100644
index 0000000..dc00df0
--- /dev/null
+++ b/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/DebugBufferCache.java
@@ -0,0 +1,158 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.common.buffercache;
+
+import java.util.concurrent.atomic.AtomicLong;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+
+/**
+ * Implementation of an IBufferCache that counts the number of pins/unpins,
+ * latches/unlatches, and file create/delete/open/close called on it. It
+ * delegates the actual functionality to another IBufferCache set in the c'tor.
+ * The counters are updated in a thread-safe fashion using AtomicLong.
+ */
+public class DebugBufferCache implements IBufferCache {
+
+    // Actual BufferCache functionality is delegated to this bufferCache.
+    private final IBufferCache bufferCache;
+    // Counters are created eagerly so that resetCounters(), called from the constructor, never sees null fields.
+    private final AtomicLong pinCount = new AtomicLong();
+    private final AtomicLong unpinCount = new AtomicLong();
+    private final AtomicLong readLatchCount = new AtomicLong();
+    private final AtomicLong readUnlatchCount = new AtomicLong();
+    private final AtomicLong writeLatchCount = new AtomicLong();
+    private final AtomicLong writeUnlatchCount = new AtomicLong();
+    private final AtomicLong createFileCount = new AtomicLong();
+    private final AtomicLong deleteFileCount = new AtomicLong();
+    private final AtomicLong openFileCount = new AtomicLong();
+    private final AtomicLong closeFileCount = new AtomicLong();
+
+    public DebugBufferCache(IBufferCache bufferCache) {
+        this.bufferCache = bufferCache;
+        resetCounters();
+    }
+
+    @Override
+    public void createFile(FileReference fileRef) throws HyracksDataException {
+        bufferCache.createFile(fileRef);
+        createFileCount.addAndGet(1);
+    }
+
+    @Override
+    public void openFile(int fileId) throws HyracksDataException {
+        bufferCache.openFile(fileId);
+        openFileCount.addAndGet(1);
+    }
+
+    @Override
+    public void closeFile(int fileId) throws HyracksDataException {
+        bufferCache.closeFile(fileId);
+        closeFileCount.addAndGet(1);
+    }
+
+    @Override
+    public void deleteFile(int fileId) throws HyracksDataException {
+        bufferCache.deleteFile(fileId);
+        deleteFileCount.addAndGet(1);
+    }
+
+    @Override
+    public ICachedPage tryPin(long dpid) throws HyracksDataException {
+        return bufferCache.tryPin(dpid);
+    }
+
+    @Override
+    public ICachedPage pin(long dpid, boolean newPage) throws HyracksDataException {
+        ICachedPage page = bufferCache.pin(dpid, newPage);
+        pinCount.addAndGet(1);
+        return page;
+    }
+
+    @Override
+    public void unpin(ICachedPage page) throws HyracksDataException {
+        bufferCache.unpin(page);
+        unpinCount.addAndGet(1);
+    }
+
+    @Override
+    public int getPageSize() {
+        return bufferCache.getPageSize();
+    }
+
+    @Override
+    public int getNumPages() {
+        return bufferCache.getNumPages();
+    }
+
+    @Override
+    public void close() {
+        bufferCache.close();
+    }
+
+    public void resetCounters() {
+        pinCount.set(0);
+        unpinCount.set(0);
+        readLatchCount.set(0);
+        readUnlatchCount.set(0);
+        writeLatchCount.set(0);
+        writeUnlatchCount.set(0);
+        createFileCount.set(0);
+        deleteFileCount.set(0);
+        openFileCount.set(0);
+        closeFileCount.set(0);
+    }
+
+    public long getPinCount() {
+        return pinCount.get();
+    }
+
+    public long getUnpinCount() {
+        return unpinCount.get();
+    }
+
+    public long getReadLatchCount() {
+        return readLatchCount.get();
+    }
+
+    public long getReadUnlatchCount() {
+        return readUnlatchCount.get();
+    }
+
+    public long getWriteLatchCount() {
+        return writeLatchCount.get();
+    }
+
+    public long getWriteUnlatchCount() {
+        return writeUnlatchCount.get();
+    }
+
+    public long getCreateFileCount() {
+        return createFileCount.get();
+    }
+
+    public long getDeleteFileCount() {
+        return deleteFileCount.get();
+    }
+
+    public long getOpenFileCount() {
+        return openFileCount.get();
+    }
+
+    public long getCloseFileCount() {
+        return closeFileCount.get();
+    }
+}
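A hedged sketch of using the new DebugBufferCache in a test: wrap whatever IBufferCache the harness provides, run the code under test, then read the counters. The underlying cache and the disk page id are placeholders supplied by the caller, not part of this patch.

    import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
    import edu.uci.ics.hyracks.storage.common.buffercache.DebugBufferCache;
    import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
    import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;

    public class DebugBufferCacheSketch {
        // 'underlying' and 'dpid' are supplied by the test harness; they are not defined in this patch.
        public static void pinOnce(IBufferCache underlying, long dpid) throws HyracksDataException {
            DebugBufferCache debugBufferCache = new DebugBufferCache(underlying);
            ICachedPage page = debugBufferCache.pin(dpid, false);
            try {
                // ... exercise the page ...
            } finally {
                debugBufferCache.unpin(page);
            }
            System.out.println("pins=" + debugBufferCache.getPinCount() + " unpins=" + debugBufferCache.getUnpinCount());
        }
    }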
diff --git a/hyracks-test-support/.classpath b/hyracks-test-support/.classpath
deleted file mode 100644
index 1f3c1ff..0000000
--- a/hyracks-test-support/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/classes" path="src/main/java"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-test-support/.project b/hyracks-test-support/.project
deleted file mode 100644
index b0c27d2..0000000
--- a/hyracks-test-support/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>hyracks-test-support</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-tests/.project b/hyracks-tests/.project
deleted file mode 100644
index 198463d..0000000
--- a/hyracks-tests/.project
+++ /dev/null
@@ -1,17 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>hyracks-tests</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/.classpath b/hyracks-tests/hyracks-storage-am-btree-test/.classpath
deleted file mode 100644
index f2cc5f7..0000000
--- a/hyracks-tests/hyracks-storage-am-btree-test/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/test-classes" path="src/test/java"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/.project b/hyracks-tests/hyracks-storage-am-btree-test/.project
deleted file mode 100644
index bc6bc56..0000000
--- a/hyracks-tests/hyracks-storage-am-btree-test/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>hyracks-storage-am-btree-test</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/AbstractBTreeTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/AbstractBTreeTest.java
deleted file mode 100644
index 56ae6e9..0000000
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/AbstractBTreeTest.java
+++ /dev/null
@@ -1,28 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import java.io.File;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.logging.Logger;
-
-import org.junit.AfterClass;
-
-public abstract class AbstractBTreeTest {
-
-    protected static final Logger LOGGER = Logger.getLogger(AbstractBTreeTest.class.getName());
-
-    protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
-    protected final static String tmpDir = System.getProperty("java.io.tmpdir");
-    protected final static String sep = System.getProperty("file.separator");
-    protected final static String fileName = tmpDir + sep + simpleDateFormat.format(new Date());
-
-    protected void print(String str) {
-        System.out.print(str);
-    }
-
-    @AfterClass
-    public static void cleanup() throws Exception {
-        File f = new File(fileName);
-        f.deleteOnExit();
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeFieldPrefixNSMTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeFieldPrefixNSMTest.java
index 4402e22..0f3edd6 100644
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeFieldPrefixNSMTest.java
+++ b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeFieldPrefixNSMTest.java
@@ -16,7 +16,6 @@
 package edu.uci.ics.hyracks.storage.am.btree;
 
 import java.io.DataOutput;
-import java.io.File;
 import java.nio.ByteBuffer;
 import java.util.Random;
 
@@ -31,7 +30,6 @@
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
 import edu.uci.ics.hyracks.api.dataflow.value.TypeTrait;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
@@ -39,202 +37,187 @@
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
 import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.btree.api.IPrefixSlotManager;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeException;
 import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeException;
-import edu.uci.ics.hyracks.storage.am.btree.impls.FieldPrefixSlotManager;
+import edu.uci.ics.hyracks.storage.am.btree.util.AbstractBTreeTest;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
 import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriter;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexUtils;
 import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
 import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestUtils;
 
 public class BTreeFieldPrefixNSMTest extends AbstractBTreeTest {
 
-	private static final int PAGE_SIZE = 32768; // 32K
-	private static final int NUM_PAGES = 40;
-	private static final int MAX_OPEN_FILES = 10;
-	private static final int HYRACKS_FRAME_SIZE = 128;
-	private IHyracksTaskContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
-		
-    private ITupleReference createTuple(IHyracksTaskContext ctx, int f0,
-			int f1, int f2, boolean print) throws HyracksDataException {
-		if (print)
-		    LOGGER.info("CREATING: " + f0 + " " + f1 + " " + f2);		
+    private static final int PAGE_SIZE = 32768; // 32K
+    private static final int NUM_PAGES = 40;
+    private static final int MAX_OPEN_FILES = 10;
+    private static final int HYRACKS_FRAME_SIZE = 128;
 
-		ByteBuffer buf = ctx.allocateFrame();
-		FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-		ArrayTupleBuilder tb = new ArrayTupleBuilder(3);
-		DataOutput dos = tb.getDataOutput();
+    private ITupleReference createTuple(IHyracksTaskContext ctx, int f0, int f1, int f2, boolean print)
+            throws HyracksDataException {
+        if (print)
+            LOGGER.info("CREATING: " + f0 + " " + f1 + " " + f2);
 
-		ISerializerDeserializer[] recDescSers = {
-				IntegerSerializerDeserializer.INSTANCE,
-				IntegerSerializerDeserializer.INSTANCE,
-				IntegerSerializerDeserializer.INSTANCE };
-		RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
-		IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx
-				.getFrameSize(), recDesc);
-		accessor.reset(buf);
-		FrameTupleReference tuple = new FrameTupleReference();
+        ByteBuffer buf = ctx.allocateFrame();
+        FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(3);
+        DataOutput dos = tb.getDataOutput();
 
-		tb.reset();
-		IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
-		tb.addFieldEndOffset();
-		IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
-		tb.addFieldEndOffset();
-		IntegerSerializerDeserializer.INSTANCE.serialize(f2, dos);
-		tb.addFieldEndOffset();
+        ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+        RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+        IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+        accessor.reset(buf);
+        FrameTupleReference tuple = new FrameTupleReference();
 
-		appender.reset(buf, true);
-		appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb
-				.getSize());
+        tb.reset();
+        IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+        tb.addFieldEndOffset();
+        IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+        tb.addFieldEndOffset();
+        IntegerSerializerDeserializer.INSTANCE.serialize(f2, dos);
+        tb.addFieldEndOffset();
 
-		tuple.reset(accessor, 0);
+        appender.reset(buf, true);
+        appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
 
-		return tuple;
-	}
+        tuple.reset(accessor, 0);
 
-	@Test
-	public void test01() throws Exception {
-		
-		TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
-		IBufferCache bufferCache = TestStorageManagerComponentHolder
-				.getBufferCache(ctx);
-		IFileMapProvider fmp = TestStorageManagerComponentHolder
-				.getFileMapProvider(ctx);
-		FileReference file = new FileReference(new File(fileName));
-		bufferCache.createFile(file);
-		int fileId = fmp.lookupFileId(file);
-		bufferCache.openFile(fileId);
+        return tuple;
+    }
 
-		// declare fields
-		int fieldCount = 3;
-		ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
-		typeTraits[0] = new TypeTrait(4);
-		typeTraits[1] = new TypeTrait(4);
-		typeTraits[2] = new TypeTrait(4);
+    @Test
+    public void test01() throws Exception {
 
-		// declare keys
-		int keyFieldCount = 3;
-		IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
-		cmps[0] = IntegerBinaryComparatorFactory.INSTANCE
-				.createBinaryComparator();
-		cmps[1] = IntegerBinaryComparatorFactory.INSTANCE
-				.createBinaryComparator();
-		cmps[2] = IntegerBinaryComparatorFactory.INSTANCE
-				.createBinaryComparator();
-		MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+        // declare fields
+        int fieldCount = 3;
+        ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+        typeTraits[0] = new TypeTrait(4);
+        typeTraits[1] = new TypeTrait(4);
+        typeTraits[2] = new TypeTrait(4);
 
-		// just for printing
-		ISerializerDeserializer[] sers = {
-				IntegerSerializerDeserializer.INSTANCE,
-				IntegerSerializerDeserializer.INSTANCE,
-				IntegerSerializerDeserializer.INSTANCE };
+        // declare keys
+        int keyFieldCount = 3;
+        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+        cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        cmps[2] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        MultiComparator cmp = new MultiComparator(cmps);
 
-		Random rnd = new Random();
-		rnd.setSeed(50);
+        // just for printing
+        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
 
-		ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(
-				fileId, 0), false);
-		try {
+        Random rnd = new Random();
+        rnd.setSeed(50);
 
-			IPrefixSlotManager slotManager = new FieldPrefixSlotManager();
-			ITreeIndexTupleWriter tupleWriter = new TypeAwareTupleWriter(typeTraits);
-			BTreeFieldPrefixNSMLeafFrame frame = new BTreeFieldPrefixNSMLeafFrame(
-					tupleWriter);
-			frame.setPage(page);
-			frame.initBuffer((byte) 0);
-			slotManager.setFrame(frame);
-			frame.setPrefixTupleCount(0);
+        ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(btreeFileId, 0), false);
+        try {
 
-			String before = new String();
-			String after = new String();
+            ITreeIndexTupleWriter tupleWriter = new TypeAwareTupleWriter(typeTraits);
+            BTreeFieldPrefixNSMLeafFrame frame = new BTreeFieldPrefixNSMLeafFrame(tupleWriter);            
+            frame.setPage(page);
+            frame.initBuffer((byte) 0);
+            frame.setMultiComparator(cmp);
+            frame.setPrefixTupleCount(0);
 
-			int compactFreq = 5;
-			int compressFreq = 5;
-			int smallMax = 10;
-			int numRecords = 1000;
+            String before = "";
+            String after = "";
 
-			int[][] savedFields = new int[numRecords][3];
+            int compactFreq = 5;
+            int compressFreq = 5;
+            int smallMax = 10;
+            int numRecords = 1000;
 
-			// insert records with random calls to compact and compress
-			for (int i = 0; i < numRecords; i++) {
+            int[][] savedFields = new int[numRecords][3];
 
-				if ((i + 1) % 100 == 0)
-					LOGGER.info("INSERTING " + (i + 1) + " / " + numRecords);
+            // insert records with random calls to compact and compress
+            for (int i = 0; i < numRecords; i++) {
 
-				int a = rnd.nextInt() % smallMax;
-				int b = rnd.nextInt() % smallMax;
-				int c = i;
+                if ((i + 1) % 100 == 0)
+                    LOGGER.info("INSERTING " + (i + 1) + " / " + numRecords);
 
-				ITupleReference tuple = createTuple(ctx, a, b, c, false);
-				try {
-					int targetTupleIndex = frame.findTupleIndex(tuple, cmp);
-					frame.insert(tuple, cmp, targetTupleIndex);
-				} catch (BTreeException e) {
-					e.printStackTrace();
-				} catch (Exception e) {
-					e.printStackTrace();
-				}
+                int a = rnd.nextInt() % smallMax;
+                int b = rnd.nextInt() % smallMax;
+                int c = i;
+                
+                ITupleReference tuple = createTuple(ctx, a, b, c, false);
+                try {
+                    int targetTupleIndex = frame.findInsertTupleIndex(tuple);
+                    frame.insert(tuple, targetTupleIndex);
+                } catch (BTreeException e) {
+                    e.printStackTrace();
+                } catch (Exception e) {
+                    e.printStackTrace();
+                }
 
-				savedFields[i][0] = a;
-				savedFields[i][1] = b;
-				savedFields[i][2] = c;
+                savedFields[i][0] = a;
+                savedFields[i][1] = b;
+                savedFields[i][2] = c;
 
-				if (rnd.nextInt() % compactFreq == 0) {
-					before = frame.printKeys(cmp, sers);
-					frame.compact(cmp);
-					after = frame.printKeys(cmp, sers);
-					Assert.assertEquals(before, after);
-				}
+                if (rnd.nextInt() % compactFreq == 0) {
+                    before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
+                    frame.compact();
+                    after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
+                    Assert.assertEquals(before, after);
+                }
 
-				if (rnd.nextInt() % compressFreq == 0) {
-					before = frame.printKeys(cmp, sers);
-					frame.compress(cmp);
-					after = frame.printKeys(cmp, sers);
-					Assert.assertEquals(before, after);
-				}
+                if (rnd.nextInt() % compressFreq == 0) {
+                    before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
+                    frame.compress();
+                    after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
+                    Assert.assertEquals(before, after);
+                }
 
-			}
+            }
 
-			// delete records with random calls to compact and compress
-			for (int i = 0; i < numRecords; i++) {
+            // delete records with random calls to compact and compress
+            for (int i = 0; i < numRecords; i++) {
 
-				if ((i + 1) % 100 == 0)
-					LOGGER.info("DELETING " + (i + 1) + " / " + numRecords);
+                if ((i + 1) % 100 == 0)
+                    LOGGER.info("DELETING " + (i + 1) + " / " + numRecords);
 
-				ITupleReference tuple = createTuple(ctx,
-						savedFields[i][0], savedFields[i][1],
-						savedFields[i][2], false);
-				try {
-					frame.delete(tuple, cmp, true);
-				} catch (Exception e) {
-				}
+                ITupleReference tuple = createTuple(ctx, savedFields[i][0], savedFields[i][1], savedFields[i][2], false);
+                try {
+                    int tupleIndex = frame.findDeleteTupleIndex(tuple);
+                    frame.delete(tuple, tupleIndex);
+                } catch (Exception e) {
+                    // Some inserts above may have failed (e.g., the page filled up), so the
+                    // matching delete can fail too; those exceptions are intentionally ignored.
+                }
 
-				if (rnd.nextInt() % compactFreq == 0) {
-					before = frame.printKeys(cmp, sers);
-					frame.compact(cmp);
-					after = frame.printKeys(cmp, sers);
-					Assert.assertEquals(before, after);
-				}
+                if (rnd.nextInt() % compactFreq == 0) {
+                    before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
+                    frame.compact();
+                    after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
+                    Assert.assertEquals(before, after);
+                }
 
-				if (rnd.nextInt() % compressFreq == 0) {
-					before = frame.printKeys(cmp, sers);
-					frame.compress(cmp);
-					after = frame.printKeys(cmp, sers);
-					Assert.assertEquals(before, after);
-				}
-			}
+                if (rnd.nextInt() % compressFreq == 0) {
+                    before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
+                    frame.compress();
+                    after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
+                    Assert.assertEquals(before, after);
+                }
+            }
 
-		} finally {
-			bufferCache.unpin(page);
-		}
-
-		bufferCache.closeFile(fileId);
-		bufferCache.close();
-	}
+        } finally {
+            bufferCache.unpin(page);
+        }
+    }
+    
+    public int getPageSize() {
+        return PAGE_SIZE;
+    }
+    
+    public int getNumPages() {
+        return NUM_PAGES;
+    }
+    
+    public int getHyracksFrameSize() {
+        return HYRACKS_FRAME_SIZE;
+    }
+    
+    public int getMaxOpenFiles() {
+        return MAX_OPEN_FILES;
+    }
 }
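In the rewritten BTreeFieldPrefixNSMTest the leaf frame now carries its own comparator (frame.setMultiComparator(cmp)), so findInsertTupleIndex/insert, findDeleteTupleIndex/delete, compact and compress no longer take a MultiComparator, and TreeIndexUtils.printFrameTuples replaces the old frame.printKeys for dumping page contents. A condensed sketch of the invariant the test checks after its random compact/compress calls; the helper name is mine and the comments describe the presumed intent of those calls:

    // Sketch: reorganizing a field-prefix leaf frame must not change its logical contents.
    private void checkFrameReorganization(BTreeFieldPrefixNSMLeafFrame frame,
            ISerializerDeserializer[] fieldSerdes) throws Exception {
        String before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
        frame.compact();   // presumably reclaims space inside the page
        String after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
        Assert.assertEquals(before, after);

        before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
        frame.compress();  // presumably recomputes the prefix compression
        after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
        Assert.assertEquals(before, after);
    }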
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeStatsTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeStatsTest.java
index 3ef0cc2..7c181c8 100644
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeStatsTest.java
+++ b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeStatsTest.java
@@ -9,7 +9,7 @@
 
 import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
@@ -27,6 +27,8 @@
 import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
 import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
 import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext;
+import edu.uci.ics.hyracks.storage.am.btree.util.AbstractBTreeTest;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
 import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
@@ -37,9 +39,9 @@
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
 import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.am.common.utility.TreeIndexBufferCacheWarmup;
-import edu.uci.ics.hyracks.storage.am.common.utility.TreeIndexStats;
-import edu.uci.ics.hyracks.storage.am.common.utility.TreeIndexStatsGatherer;
+import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexBufferCacheWarmup;
+import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexStats;
+import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexStatsGatherer;
 import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
 import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
@@ -75,10 +77,10 @@
 
         // declare keys
         int keyFieldCount = 1;
-        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
-        cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+        cmpFactories[0] = IntegerBinaryComparatorFactory.INSTANCE;
 
-        MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+        MultiComparator cmp = BTreeUtils.createMultiComparator(cmpFactories);
 
         TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
         ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
@@ -91,8 +93,8 @@
 
         IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
 
-        BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
-        btree.create(fileId, leafFrame, metaFrame);
+        BTree btree = new BTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
+        btree.create(fileId);
         btree.open(fileId);
 
         Random rnd = new Random();
@@ -104,7 +106,7 @@
 
         ByteBuffer frame = ctx.allocateFrame();
         FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
         DataOutput dos = tb.getDataOutput();
 
         ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
@@ -114,7 +116,7 @@
         accessor.reset(frame);
         FrameTupleReference tuple = new FrameTupleReference();
 
-        BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
+        BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT);
 
         // 10000
         for (int i = 0; i < 100000; i++) {
@@ -149,7 +151,7 @@
         TreeIndexStatsGatherer statsGatherer = new TreeIndexStatsGatherer(bufferCache, freePageManager, fileId,
                 btree.getRootPageId());
         TreeIndexStats stats = statsGatherer.gatherStats(leafFrame, interiorFrame, metaFrame);
-        LOGGER.info(stats.toString());
+        LOGGER.info("\n" + stats.toString());
 
         TreeIndexBufferCacheWarmup bufferCacheWarmup = new TreeIndexBufferCacheWarmup(bufferCache, freePageManager,
                 fileId);
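BTreeStatsTest and BTreeTest below repeat the same migration: comparator factories fed to BTreeUtils.createMultiComparator instead of hand-built IBinaryComparator arrays, a BTree constructor that takes the field count and comparator directly, create(fileId) without frame arguments, and createOpContext(IndexOp.INSERT) without leaf/interior/meta frames. A minimal sketch of that setup pattern, assuming the buffer cache, open file id and type traits are prepared as in the tests and the usual imports are in place; the helper name and signature are mine:

    // Sketch of the post-refactoring construction pattern shared by these tests.
    private static BTree createIntKeyBTree(IBufferCache bufferCache, int fileId,
            ITypeTrait[] typeTraits, int fieldCount) throws Exception {
        IBinaryComparatorFactory[] cmpFactories = { IntegerBinaryComparatorFactory.INSTANCE };
        MultiComparator cmp = BTreeUtils.createMultiComparator(cmpFactories);

        TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);

        // Frames are no longer threaded through create()/createOpContext(); the tree builds what it needs.
        BTree btree = new BTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
        btree.create(fileId);
        btree.open(fileId);
        return btree;
    }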
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTest.java
index a9debd5..a357f2a 100644
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTest.java
+++ b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTest.java
@@ -15,9 +15,13 @@
 
 package edu.uci.ics.hyracks.storage.am.btree;
 
+import java.io.ByteArrayInputStream;
+import java.io.DataInputStream;
 import java.io.DataOutput;
 import java.io.File;
 import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.Map;
 import java.util.Random;
 
 import org.junit.Test;
@@ -25,12 +29,15 @@
 import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
 import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
 import edu.uci.ics.hyracks.api.dataflow.value.TypeTrait;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.api.io.FileReference;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
@@ -39,6 +46,7 @@
 import edu.uci.ics.hyracks.dataflow.common.data.comparators.UTF8StringBinaryComparatorFactory;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
 import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
 import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
 import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
@@ -47,6 +55,8 @@
 import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext;
 import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
 import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.btree.util.AbstractBTreeTest;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
 import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
 import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
@@ -60,7 +70,6 @@
 import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.common.tuples.SimpleTupleWriterFactory;
 import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
 import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
 import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
@@ -101,13 +110,13 @@
 
         // declare keys
         int keyFieldCount = 1;
-        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
-        cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+        cmpFactories[0] = IntegerBinaryComparatorFactory.INSTANCE;
 
-        MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+        MultiComparator cmp = BTreeUtils.createMultiComparator(cmpFactories);
 
         TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
-        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);        
         ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
         ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
 
@@ -117,8 +126,8 @@
 
         IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
 
-        BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
-        btree.create(fileId, leafFrame, metaFrame);
+        BTree btree = new BTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
+        btree.create(fileId);
         btree.open(fileId);
 
         Random rnd = new Random();
@@ -130,7 +139,7 @@
 
         ByteBuffer frame = ctx.allocateFrame();
         FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
         DataOutput dos = tb.getDataOutput();
 
         ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
@@ -140,7 +149,7 @@
         accessor.reset(frame);
         FrameTupleReference tuple = new FrameTupleReference();
 
-        BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
+        BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT);
 
         // 10000
         for (int i = 0; i < 10000; i++) {
@@ -159,13 +168,16 @@
 
             tuple.reset(accessor, 0);
 
+            ArrayTupleReference t = new ArrayTupleReference();
+            t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+            
             if (i % 1000 == 0) {
                 long end = System.currentTimeMillis();
                 LOGGER.info("INSERTING " + i + " : " + f0 + " " + f1 + " " + (end - start));
             }
 
             try {
-                btree.insert(tuple, insertOpCtx);
+                btree.insert(t, insertOpCtx);
             } catch (TreeIndexException e) {
             } catch (Exception e) {
                 e.printStackTrace();
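The insert call now takes an ArrayTupleReference built directly over the ArrayTupleBuilder's buffer instead of the frame-backed FrameTupleReference, so the appender/accessor round-trip kept above appears to be leftover scaffolding as far as the insert itself is concerned. A small sketch of that tuple-building pattern as a reusable helper; the helper name is mine, not from the source:

    // Sketch: build a two-integer tuple for btree.insert()/update() without a frame accessor.
    private static ITupleReference buildIntPair(ArrayTupleBuilder tb, ArrayTupleReference t, int f0, int f1)
            throws HyracksDataException {
        DataOutput dos = tb.getDataOutput();
        tb.reset();
        IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
        tb.addFieldEndOffset();
        IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
        tb.addFieldEndOffset();
        // The reference points straight into the builder's byte array.
        t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
        return t;
    }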
@@ -175,7 +187,6 @@
 
         int maxPage = btree.getFreePageManager().getMaxPage(metaFrame);
         LOGGER.info("MAXPAGE: " + maxPage);
-        LOGGER.info(btree.printStats());
 
         long end = System.currentTimeMillis();
         long duration = end - start;
@@ -186,13 +197,13 @@
         LOGGER.info("ORDERED SCAN:");
         ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(leafFrame);
         RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
-        BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH, leafFrame, interiorFrame, null);
+        BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH);
         btree.search(scanCursor, nullPred, searchOpCtx);
         try {
             while (scanCursor.hasNext()) {
                 scanCursor.next();
                 ITupleReference frameTuple = scanCursor.getTuple();
-                String rec = cmp.printTuple(frameTuple, recDescSers);
+                String rec = TupleUtils.printTuple(frameTuple, recDescSers);
                 LOGGER.info(rec);
             }
         } catch (Exception e) {
@@ -204,13 +215,13 @@
         // disk-order scan
         LOGGER.info("DISK-ORDER SCAN:");
         TreeDiskOrderScanCursor diskOrderCursor = new TreeDiskOrderScanCursor(leafFrame);
-        BTreeOpContext diskOrderScanOpCtx = btree.createOpContext(IndexOp.DISKORDERSCAN, leafFrame, null, null);
-        btree.diskOrderScan(diskOrderCursor, leafFrame, metaFrame, diskOrderScanOpCtx);
+        BTreeOpContext diskOrderScanOpCtx = btree.createOpContext(IndexOp.DISKORDERSCAN);
+        btree.diskOrderScan(diskOrderCursor, diskOrderScanOpCtx);
         try {
             while (diskOrderCursor.hasNext()) {
                 diskOrderCursor.next();
                 ITupleReference frameTuple = diskOrderCursor.getTuple();
-                String rec = cmp.printTuple(frameTuple, recDescSers);
+                String rec = TupleUtils.printTuple(frameTuple, recDescSers);
                 LOGGER.info(rec);
             }
         } catch (Exception e) {
@@ -256,7 +267,7 @@
 
         IBinaryComparator[] searchCmps = new IBinaryComparator[1];
         searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-        MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
+        MultiComparator searchCmp = new MultiComparator(searchCmps);
 
         RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
         btree.search(rangeCursor, rangePred, searchOpCtx);
@@ -265,7 +276,7 @@
             while (rangeCursor.hasNext()) {
                 rangeCursor.next();
                 ITupleReference frameTuple = rangeCursor.getTuple();
-                String rec = cmp.printTuple(frameTuple, recDescSers);
+                String rec = TupleUtils.printTuple(frameTuple, recDescSers);
                 LOGGER.info(rec);
             }
         } catch (Exception e) {
@@ -306,16 +317,14 @@
 
         // declare keys
         int keyFieldCount = 2;
-        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
-        cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-        cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+        cmpFactories[0] = IntegerBinaryComparatorFactory.INSTANCE;
+        cmpFactories[1] = IntegerBinaryComparatorFactory.INSTANCE;
 
-        MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+        MultiComparator cmp = BTreeUtils.createMultiComparator(cmpFactories);
 
         TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
-        // SimpleTupleWriterFactory tupleWriterFactory = new
-        // SimpleTupleWriterFactory();
-        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);        
         ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
         ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
 
@@ -325,8 +334,8 @@
 
         IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
 
-        BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
-        btree.create(fileId, leafFrame, metaFrame);
+        BTree btree = new BTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
+        btree.create(fileId);
         btree.open(fileId);
 
         Random rnd = new Random();
@@ -338,7 +347,7 @@
 
         ByteBuffer frame = ctx.allocateFrame();
         FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
         DataOutput dos = tb.getDataOutput();
 
         ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
@@ -348,13 +357,14 @@
         accessor.reset(frame);
         FrameTupleReference tuple = new FrameTupleReference();
 
-        BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
-
+        BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT);
+        
+        // Magic test number: 3029. 6398. 4875.
         for (int i = 0; i < 10000; i++) {
             int f0 = rnd.nextInt() % 2000;
             int f1 = rnd.nextInt() % 1000;
-            int f2 = 5;
-
+            int f2 = 5;            
+            
             tb.reset();
             IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
             tb.addFieldEndOffset();
@@ -362,7 +372,7 @@
             tb.addFieldEndOffset();
             IntegerSerializerDeserializer.INSTANCE.serialize(f2, dos);
             tb.addFieldEndOffset();
-
+            
             appender.reset(frame, true);
             appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
 
@@ -371,13 +381,16 @@
             if (i % 1000 == 0) {
                 LOGGER.info("INSERTING " + i + " : " + f0 + " " + f1);
             }
-
+            
             try {
                 btree.insert(tuple, insertOpCtx);
             } catch (Exception e) {
             }
+            
+            //ISerializerDeserializer[] keySerdes = { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+            //btree.printTree(leafFrame, interiorFrame, keySerdes);
+            //System.out.println("---------------------------------");
         }
-        // btree.printTree(leafFrame, interiorFrame);
 
         long end = System.currentTimeMillis();
         long duration = end - start;
@@ -387,14 +400,14 @@
         LOGGER.info("ORDERED SCAN:");
         ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(leafFrame);
         RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
-        BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH, leafFrame, interiorFrame, null);
+        BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH);
         btree.search(scanCursor, nullPred, searchOpCtx);
 
         try {
             while (scanCursor.hasNext()) {
                 scanCursor.next();
                 ITupleReference frameTuple = scanCursor.getTuple();
-                String rec = cmp.printTuple(frameTuple, recDescSers);
+                String rec = TupleUtils.printTuple(frameTuple, recDescSers);
                 LOGGER.info(rec);
             }
         } catch (Exception e) {
@@ -439,7 +452,7 @@
 
         IBinaryComparator[] searchCmps = new IBinaryComparator[1];
         searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-        MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps); // use
+        MultiComparator searchCmp = new MultiComparator(searchCmps); // use
         // only
         // a
         // single
@@ -454,8 +467,8 @@
             while (rangeCursor.hasNext()) {
                 rangeCursor.next();
                 ITupleReference frameTuple = rangeCursor.getTuple();
-                String rec = cmp.printTuple(frameTuple, recDescSers);
-                print(rec + "\n");
+                String rec = TupleUtils.printTuple(frameTuple, recDescSers);
+                LOGGER.info(rec);
             }
         } catch (Exception e) {
             e.printStackTrace();
@@ -463,6 +476,7 @@
             rangeCursor.close();
         }
 
+
         btree.close();
         bufferCache.closeFile(fileId);
         bufferCache.close();
@@ -494,15 +508,13 @@
 
         // declare keys
         int keyFieldCount = 1;
-        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
-        cmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+        cmpFactories[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
 
-        MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+        MultiComparator cmp = BTreeUtils.createMultiComparator(cmpFactories);
 
-        SimpleTupleWriterFactory tupleWriterFactory = new SimpleTupleWriterFactory();
-        // TypeAwareTupleWriterFactory tupleWriterFactory = new
-        // TypeAwareTupleWriterFactory(typeTraits);
-        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+        TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);        
         ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
         ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
 
@@ -512,8 +524,8 @@
 
         IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
 
-        BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
-        btree.create(fileId, leafFrame, metaFrame);
+        BTree btree = new BTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
+        btree.create(fileId);
         btree.open(fileId);
 
         Random rnd = new Random();
@@ -521,7 +533,7 @@
 
         ByteBuffer frame = ctx.allocateFrame();
         FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
         DataOutput dos = tb.getDataOutput();
 
         ISerializerDeserializer[] recDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
@@ -531,7 +543,7 @@
         accessor.reset(frame);
         FrameTupleReference tuple = new FrameTupleReference();
 
-        BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
+        BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT);
         int maxLength = 10; // max string length to be generated
         for (int i = 0; i < 10000; i++) {
 
@@ -566,14 +578,14 @@
         LOGGER.info("ORDERED SCAN:");
         ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(leafFrame);
         RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
-        BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH, leafFrame, interiorFrame, null);
+        BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH);
         btree.search(scanCursor, nullPred, searchOpCtx);
 
         try {
             while (scanCursor.hasNext()) {
                 scanCursor.next();
                 ITupleReference frameTuple = scanCursor.getTuple();
-                String rec = cmp.printTuple(frameTuple, recDescSers);
+                String rec = TupleUtils.printTuple(frameTuple, recDescSers);
                 LOGGER.info(rec);
             }
         } catch (Exception e) {
@@ -619,7 +631,7 @@
 
         IBinaryComparator[] searchCmps = new IBinaryComparator[1];
         searchCmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-        MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
+        MultiComparator searchCmp = new MultiComparator(searchCmps);
 
         RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
         btree.search(rangeCursor, rangePred, searchOpCtx);
@@ -628,7 +640,7 @@
             while (rangeCursor.hasNext()) {
                 rangeCursor.next();
                 ITupleReference frameTuple = rangeCursor.getTuple();
-                String rec = cmp.printTuple(frameTuple, recDescSers);
+                String rec = TupleUtils.printTuple(frameTuple, recDescSers);
                 LOGGER.info(rec);
             }
         } catch (Exception e) {
@@ -669,15 +681,13 @@
 
         // declare keys
         int keyFieldCount = 1;
-        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
-        cmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+        cmpFactories[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
 
-        MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+        MultiComparator cmp = BTreeUtils.createMultiComparator(cmpFactories);
 
-        // SimpleTupleWriterFactory tupleWriterFactory = new
-        // SimpleTupleWriterFactory();
         TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
-        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);        
         ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
         ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
 
@@ -687,8 +697,8 @@
 
         IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
 
-        BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
-        btree.create(fileId, leafFrame, metaFrame);
+        BTree btree = new BTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
+        btree.create(fileId);
         btree.open(fileId);
 
         Random rnd = new Random();
@@ -696,7 +706,7 @@
 
         ByteBuffer frame = ctx.allocateFrame();
         FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
         DataOutput dos = tb.getDataOutput();
 
         ISerializerDeserializer[] recDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
@@ -706,8 +716,8 @@
         accessor.reset(frame);
         FrameTupleReference tuple = new FrameTupleReference();
 
-        BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
-        BTreeOpContext deleteOpCtx = btree.createOpContext(IndexOp.DELETE, leafFrame, interiorFrame, metaFrame);
+        BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT);
+        BTreeOpContext deleteOpCtx = btree.createOpContext(IndexOp.DELETE);
 
         int runs = 3;
         for (int run = 0; run < runs; run++) {
@@ -716,6 +726,7 @@
 
             LOGGER.info("INSERTING INTO BTREE");
             int maxLength = 10;
+            //int ins = 16;
             int ins = 10000;
             String[] f0s = new String[ins];
             String[] f1s = new String[ins];
@@ -747,16 +758,14 @@
                     btree.insert(tuple, insertOpCtx);
                     insDone++;
                 } catch (TreeIndexException e) {
-                    // e.printStackTrace();
+                    //e.printStackTrace();
                 } catch (Exception e) {
                     e.printStackTrace();
                 }
 
                 insDoneCmp[i] = insDone;
             }
-            // btree.printTree();
-            // btree.printStats();
-
+            
             LOGGER.info("DELETING FROM BTREE");
             int delDone = 0;
             for (int i = 0; i < ins; i++) {
@@ -780,7 +789,7 @@
                     btree.delete(tuple, deleteOpCtx);
                     delDone++;
                 } catch (TreeIndexException e) {
-                    // e.printStackTrace();
+                    //e.printStackTrace();
                 } catch (Exception e) {
                     e.printStackTrace();
                 }
@@ -804,12 +813,221 @@
         bufferCache.close();
     }
 
+    
+    private void orderedScan(BTree btree, IBTreeLeafFrame leafFrame, IBTreeInteriorFrame interiorFrame, ISerializerDeserializer[] recDescSers) throws Exception {
+        // try a simple index scan
+        LOGGER.info("ORDERED SCAN:");
+        ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(leafFrame);
+        RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+        BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH);
+        btree.search(scanCursor, nullPred, searchOpCtx);
+        StringBuilder scanResults = new StringBuilder();
+        try {
+            while (scanCursor.hasNext()) {
+                scanCursor.next();
+                ITupleReference frameTuple = scanCursor.getTuple();
+                String rec = TupleUtils.printTuple(frameTuple, recDescSers);
+                scanResults.append("\n" + rec);
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        } finally {
+            scanCursor.close();
+        }
+        LOGGER.info(scanResults.toString());
+    }
+    
+    // Assuming exactly two BTree fields.
+    private void compareActualAndExpected(ITreeIndexCursor actualCursor, Map<String, String> expectedValues, ISerializerDeserializer[] fieldSerdes) throws Exception {
+        while (actualCursor.hasNext()) {
+            actualCursor.next();
+            ITupleReference tuple = actualCursor.getTuple();
+            String f0 = (String) deserializeField(tuple, 0, fieldSerdes[0]);
+            String f1 = (String) deserializeField(tuple, 1, fieldSerdes[1]);
+            String expected = expectedValues.get(f0);
+            if (!expected.equals(f1)) {
+                throw new Exception("FAILED: " + f0 + " " + f1 + " " + expected);
+            }
+        }
+    }
+    
+    private Object deserializeField(ITupleReference tuple, int fIdx, ISerializerDeserializer serde) throws HyracksDataException {
+        DataInputStream in = new DataInputStream(new ByteArrayInputStream(tuple.getFieldData(fIdx), tuple.getFieldStart(fIdx), tuple.getFieldLength(fIdx)));
+        return serde.deserialize(in);
+    }
+    
+    // UPDATE TEST
+    // Create a B-tree with one variable-length "key" field and one variable-length
+    // "value" field. Fill the B-tree with random values using insertions, then update
+    // the entries one-by-one. Repeat the procedure a few times on the same B-tree.
+    @Test
+    public void test05() throws Exception {
+
+        LOGGER.info("DELETION TEST");
+
+        TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+        IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+        FileReference file = new FileReference(new File(fileName));
+        bufferCache.createFile(file);
+        int fileId = fmp.lookupFileId(file);
+        bufferCache.openFile(fileId);
+
+        // declare fields
+        int fieldCount = 2;
+        ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+        typeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+        typeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+
+        // declare keys
+        int keyFieldCount = 1;
+        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+        cmpFactories[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
+
+        MultiComparator cmp = BTreeUtils.createMultiComparator(cmpFactories);
+
+        TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);        
+        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
+        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+
+        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
+        IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
+        ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
+
+        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
+
+        BTree btree = new BTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
+        btree.create(fileId);
+        btree.open(fileId);
+
+        Random rnd = new Random();
+        rnd.setSeed(50);
+
+        ByteBuffer frame = ctx.allocateFrame();
+        FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+        DataOutput dos = tb.getDataOutput();
+
+        ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE };
+        RecordDescriptor recDesc = new RecordDescriptor(fieldSerdes);
+        IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+        accessor.reset(frame);
+        FrameTupleReference tuple = new FrameTupleReference();
+
+        BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT);
+        BTreeOpContext updateOpCtx = btree.createOpContext(IndexOp.UPDATE);
+
+        Map<String, String> expectedValues = new HashMap<String, String>();
+        
+        LOGGER.info("INSERTING INTO BTREE");
+        int maxLength = 10;
+        int ins = 10000;
+        // Only remember the keys.
+        String[] f0s = new String[ins];
+        for (int i = 0; i < ins; i++) {
+            String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+            String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+
+            f0s[i] = f0;
+
+            tb.reset();
+            UTF8StringSerializerDeserializer.INSTANCE.serialize(f0, dos);
+            tb.addFieldEndOffset();
+            UTF8StringSerializerDeserializer.INSTANCE.serialize(f1, dos);
+            tb.addFieldEndOffset();
+
+            ArrayTupleReference t = new ArrayTupleReference();
+            t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+            
+            appender.reset(frame, true);
+            appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+            tuple.reset(accessor, 0);
+
+            if (i % 1000 == 0) {
+                LOGGER.info("INSERTING " + i);
+            }
+            try {
+                btree.insert(t, insertOpCtx);
+                expectedValues.put(f0, f1);
+            } catch (TreeIndexException e) {
+                // e.printStackTrace();
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+        ITreeIndexCursor insertCheckCursor = new BTreeRangeSearchCursor(leafFrame);
+        RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+        BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH);
+        btree.search(insertCheckCursor, nullPred, searchOpCtx);
+        try {
+            compareActualAndExpected(insertCheckCursor, expectedValues, fieldSerdes);
+        } finally {
+            insertCheckCursor.close();
+        }
+        
+        int runs = 3;
+        for (int run = 0; run < runs; run++) {
+
+            LOGGER.info("UPDATE TEST RUN: " + (run + 1) + "/" + runs);
+
+            LOGGER.info("UPDATING BTREE");
+            for (int i = 0; i < ins; i++) {
+                // Generate a new random value for f1.
+                String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+                
+                tb.reset();
+                UTF8StringSerializerDeserializer.INSTANCE.serialize(f0s[i], dos);
+                tb.addFieldEndOffset();
+                UTF8StringSerializerDeserializer.INSTANCE.serialize(f1, dos);
+                tb.addFieldEndOffset();
+
+                appender.reset(frame, true);
+                appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+                tuple.reset(accessor, 0);
+
+                if (i % 1000 == 0) {
+                    LOGGER.info("UPDATING " + i);
+                }
+
+                ArrayTupleReference t = new ArrayTupleReference();
+                t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+                
+                try {
+                    btree.update(t, updateOpCtx);
+                    expectedValues.put(f0s[i], f1);
+                } catch (TreeIndexException e) {
+                    e.printStackTrace();
+                } catch (Exception e) {
+                    e.printStackTrace();
+                }
+            }
+            
+            ITreeIndexCursor updateCheckCursor = new BTreeRangeSearchCursor(leafFrame);
+            btree.search(updateCheckCursor, nullPred, searchOpCtx);
+            try {
+                compareActualAndExpected(updateCheckCursor, expectedValues, fieldSerdes);
+            } finally {
+                updateCheckCursor.close();
+            }
+        }
+
+        btree.close();
+        bufferCache.closeFile(fileId);
+        bufferCache.close();
+    }
+    
     // BULK LOAD TEST
     // insert 100,000 records in bulk
     // B-tree has a composite key to "simulate" non-unique index creation
     // do range search
     @Test
-    public void test05() throws Exception {
+    public void test06() throws Exception {
 
         LOGGER.info("BULK LOAD TEST");
 
@@ -830,16 +1048,14 @@
 
         // declare keys
         int keyFieldCount = 2;
-        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
-        cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-        cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+        cmpFactories[0] = IntegerBinaryComparatorFactory.INSTANCE;
+        cmpFactories[1] = IntegerBinaryComparatorFactory.INSTANCE;
 
-        MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+        MultiComparator cmp = BTreeUtils.createMultiComparator(cmpFactories);
 
-        // SimpleTupleWriterFactory tupleWriterFactory = new
-        // SimpleTupleWriterFactory();
         TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
-        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);        
         ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
         ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
 
@@ -849,8 +1065,8 @@
 
         IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
 
-        BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
-        btree.create(fileId, leafFrame, metaFrame);
+        BTree btree = new BTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
+        btree.create(fileId);
         btree.open(fileId);
 
         Random rnd = new Random();
@@ -858,7 +1074,7 @@
 
         ByteBuffer frame = ctx.allocateFrame();
         FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
         DataOutput dos = tb.getDataOutput();
 
         ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
@@ -868,7 +1084,7 @@
         accessor.reset(frame);
         FrameTupleReference tuple = new FrameTupleReference();
 
-        IIndexBulkLoadContext bulkLoadCtx = btree.beginBulkLoad(0.7f, leafFrame, interiorFrame, metaFrame);
+        IIndexBulkLoadContext bulkLoadCtx = btree.beginBulkLoad(0.7f);
 
         // generate sorted records
         int ins = 100000;
@@ -889,7 +1105,7 @@
 
             tuple.reset(accessor, 0);
 
-            btree.bulkLoadAddTuple(bulkLoadCtx, tuple);
+            btree.bulkLoadAddTuple(tuple, bulkLoadCtx);
         }
 
         btree.endBulkLoad(bulkLoadCtx);
@@ -936,18 +1152,18 @@
 
         IBinaryComparator[] searchCmps = new IBinaryComparator[1];
         searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-        MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
+        MultiComparator searchCmp = new MultiComparator(searchCmps);
 
         // TODO: check when searching backwards
         RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
-        BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH, leafFrame, interiorFrame, null);
+        BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH);
         btree.search(rangeCursor, rangePred, searchOpCtx);
 
         try {
             while (rangeCursor.hasNext()) {
                 rangeCursor.next();
                 ITupleReference frameTuple = rangeCursor.getTuple();
-                String rec = cmp.printTuple(frameTuple, recDescSers);
+                String rec = TupleUtils.printTuple(frameTuple, recDescSers);
                 LOGGER.info(rec);
             }
         } catch (Exception e) {
@@ -965,7 +1181,7 @@
     // demo for Arjun to show easy support of intersection queries on
     // time-intervals
     @Test
-    public void test06() throws Exception {
+    public void test07() throws Exception {
 
         LOGGER.info("TIME-INTERVAL INTERSECTION DEMO");
 
@@ -986,15 +1202,14 @@
 
         // declare keys
         int keyFieldCount = 2;
-        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
-        cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-        cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-        MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+        cmpFactories[0] = IntegerBinaryComparatorFactory.INSTANCE;
+        cmpFactories[1] = IntegerBinaryComparatorFactory.INSTANCE;
+        
+        MultiComparator cmp = BTreeUtils.createMultiComparator(cmpFactories);
 
-        // SimpleTupleWriterFactory tupleWriterFactory = new
-        // SimpleTupleWriterFactory();
         TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
-        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);        
         ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
         ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
 
@@ -1004,8 +1219,8 @@
 
         IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
 
-        BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
-        btree.create(fileId, leafFrame, metaFrame);
+        BTree btree = new BTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
+        btree.create(fileId);
         btree.open(fileId);
 
         Random rnd = new Random();
@@ -1013,7 +1228,7 @@
 
         ByteBuffer frame = ctx.allocateFrame();
         FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
         DataOutput dos = tb.getDataOutput();
 
         ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
@@ -1058,7 +1273,7 @@
         intervals[9][0] = 20;
         intervals[9][1] = 35;
 
-        BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
+        BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT);
 
         // int exceptionCount = 0;
         for (int i = 0; i < intervalCount; i++) {
@@ -1098,15 +1313,16 @@
         LOGGER.info("ORDERED SCAN:");
         ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(leafFrame);
         RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
-        BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH, leafFrame, interiorFrame, null);
+        BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH);
         btree.search(scanCursor, nullPred, searchOpCtx);
 
         try {
             while (scanCursor.hasNext()) {
                 scanCursor.next();
                 ITupleReference frameTuple = scanCursor.getTuple();
-                String rec = cmp.printTuple(frameTuple, recDescSers);
-                print(rec + "\n");
+                String rec = TupleUtils.printTuple(frameTuple, recDescSers);
+                // TODO: fix me.
+                //print(rec + "\n");
             }
         } catch (Exception e) {
             e.printStackTrace();
@@ -1156,7 +1372,7 @@
         IBinaryComparator[] searchCmps = new IBinaryComparator[2];
         searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
         searchCmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-        MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
+        MultiComparator searchCmp = new MultiComparator(searchCmps);
 
         RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
         btree.search(rangeCursor, rangePred, searchOpCtx);
@@ -1165,7 +1381,7 @@
             while (rangeCursor.hasNext()) {
                 rangeCursor.next();
                 ITupleReference frameTuple = rangeCursor.getTuple();
-                String rec = cmp.printTuple(frameTuple, recDescSers);
+                String rec = TupleUtils.printTuple(frameTuple, recDescSers);
                 LOGGER.info(rec);
             }
         } catch (Exception e) {
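
The hunks above all apply the same API migration: the BTree constructor now takes the tuple field count and a MultiComparator built from IBinaryComparatorFactory instances, create() and open() take only a file id, and op contexts are created from an IndexOp alone since frames are handled internally. A condensed sketch of the new-style setup, assuming bufferCache, fileId, and typeTraits are supplied by the surrounding test fixture as in the hunks above (imports as in those files):

    // Sketch only: bufferCache, fileId, and typeTraits are assumed to come from the test fixture.
    int fieldCount = typeTraits.length;
    IBinaryComparatorFactory[] cmpFactories = { IntegerBinaryComparatorFactory.INSTANCE };
    MultiComparator cmp = BTreeUtils.createMultiComparator(cmpFactories);

    TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
    ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
    ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
    ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
    IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);

    // New constructor: field count and comparator are passed in, frames are not.
    BTree btree = new BTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
    btree.create(fileId);
    btree.open(fileId);

    // Op contexts are now created from the IndexOp alone.
    BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT);
    btree.insert(TupleUtils.createIntegerTuple(42, 7), insertOpCtx);
    btree.close();
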
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTestDriver.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTestDriver.java
new file mode 100644
index 0000000..ef1e5e3
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTestDriver.java
@@ -0,0 +1,119 @@
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.util.AbstractBTreeTest;
+
+@SuppressWarnings("rawtypes")
+public abstract class BTreeTestDriver extends AbstractBTreeTest {
+
+    protected static final int numTuplesToInsert = 10000;
+    
+    protected abstract void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType, ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey) throws Exception;
+    protected abstract String getTestOpName();
+    
+    @Test
+    public void oneIntKeyAndValue() throws Exception {        
+        LOGGER.info("BTree " + getTestOpName() + " Test With One Int Key And Value.");
+                
+        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+        // Range search in [-1000, 1000]
+        ITupleReference lowKey = TupleUtils.createIntegerTuple(-1000);
+        ITupleReference highKey = TupleUtils.createIntegerTuple(1000);
+        
+        runTest(fieldSerdes, 1, BTreeLeafFrameType.REGULAR_NSM, lowKey, highKey, null, null);
+        runTest(fieldSerdes, 1, BTreeLeafFrameType.FIELD_PREFIX_COMPRESSED_NSM, lowKey, highKey, null, null);
+    }
+    
+    @Test
+    public void twoIntKeys() throws Exception {        
+        LOGGER.info("BTree " + getTestOpName() + " Test With Two Int Keys.");
+        
+        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+        
+        // Range search in [50 0, 50 500]
+        ITupleReference lowKey = TupleUtils.createIntegerTuple(50, 0);
+        ITupleReference highKey = TupleUtils.createIntegerTuple(50, 500);
+        
+        // Prefix range search in [50, 50]
+        ITupleReference prefixLowKey = TupleUtils.createIntegerTuple(50);
+        ITupleReference prefixHighKey = TupleUtils.createIntegerTuple(50);
+        
+        runTest(fieldSerdes, 2, BTreeLeafFrameType.REGULAR_NSM, lowKey, highKey, prefixLowKey, prefixHighKey);
+        runTest(fieldSerdes, 2, BTreeLeafFrameType.FIELD_PREFIX_COMPRESSED_NSM, lowKey, highKey, prefixLowKey, prefixHighKey);
+    }
+    
+    @Test
+    public void twoIntKeysAndValues() throws Exception {        
+        LOGGER.info("BTree " + getTestOpName() + " Test With Two Int Keys And Values.");
+        
+        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+        
+        // Range search in [-100 -100, 100 100]
+        ITupleReference lowKey = TupleUtils.createIntegerTuple(-100, -100);
+        ITupleReference highKey = TupleUtils.createIntegerTuple(100, 100);
+        
+        // Prefix range search in [50, 50]
+        ITupleReference prefixLowKey = TupleUtils.createIntegerTuple(50);
+        ITupleReference prefixHighKey = TupleUtils.createIntegerTuple(50);
+        
+        runTest(fieldSerdes, 2, BTreeLeafFrameType.REGULAR_NSM, lowKey, highKey, prefixLowKey, prefixHighKey);
+        runTest(fieldSerdes, 2, BTreeLeafFrameType.FIELD_PREFIX_COMPRESSED_NSM, lowKey, highKey, prefixLowKey, prefixHighKey);
+    }        
+    
+    @Test
+    public void oneStringKeyAndValue() throws Exception {        
+        LOGGER.info("BTree " + getTestOpName() + " Test With One String Key And Value.");
+        
+        ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE };
+        
+        // Range search in ["cbf", cc7"]
+        ITupleReference lowKey = TupleUtils.createTuple(fieldSerdes, "cbf");
+        ITupleReference highKey = TupleUtils.createTuple(fieldSerdes, "cc7");
+        
+        runTest(fieldSerdes, 1, BTreeLeafFrameType.REGULAR_NSM, lowKey, highKey, null, null);
+        runTest(fieldSerdes, 1, BTreeLeafFrameType.FIELD_PREFIX_COMPRESSED_NSM, lowKey, highKey, null, null);
+    }
+    
+    @Test
+    public void twoStringKeys() throws Exception {        
+        LOGGER.info("BTree " + getTestOpName() + " Test With Two String Keys.");
+        
+        ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE };
+        
+        // Range search in ["cbf", "ddd", cc7", "eee"]
+        ITupleReference lowKey = TupleUtils.createTuple(fieldSerdes, "cbf", "ddd");
+        ITupleReference highKey = TupleUtils.createTuple(fieldSerdes, "cc7", "eee");
+        
+        // Prefix range search in ["cbf", cc7"]
+        ITupleReference prefixLowKey = TupleUtils.createTuple(fieldSerdes, "cbf");
+        ITupleReference prefixHighKey = TupleUtils.createTuple(fieldSerdes, "cc7");
+        
+        runTest(fieldSerdes, 2, BTreeLeafFrameType.REGULAR_NSM, lowKey, highKey, prefixLowKey, prefixHighKey);
+        runTest(fieldSerdes, 2, BTreeLeafFrameType.FIELD_PREFIX_COMPRESSED_NSM, lowKey, highKey, prefixLowKey, prefixHighKey);
+    }
+    
+    @Test
+    public void twoStringKeysAndValues() throws Exception {        
+        LOGGER.info("BTree " + getTestOpName() + " Test With Two String Keys And Values.");
+        
+        ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE };
+        
+        // Range search in ["cbf", "ddd", cc7", "eee"]
+        ITupleReference lowKey = TupleUtils.createTuple(fieldSerdes, "cbf", "ddd");
+        ITupleReference highKey = TupleUtils.createTuple(fieldSerdes, "cc7", "eee");
+        
+        // Prefix range search in ["cbf", cc7"]
+        ITupleReference prefixLowKey = TupleUtils.createTuple(fieldSerdes, "cbf");
+        ITupleReference prefixHighKey = TupleUtils.createTuple(fieldSerdes, "cc7");
+        
+        runTest(fieldSerdes, 2, BTreeLeafFrameType.REGULAR_NSM, lowKey, highKey, prefixLowKey, prefixHighKey);
+        runTest(fieldSerdes, 2, BTreeLeafFrameType.FIELD_PREFIX_COMPRESSED_NSM, lowKey, highKey, prefixLowKey, prefixHighKey);
+    }
+}
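
BTreeTestDriver fixes the key/value configurations and leaf frame types; subclasses only supply runTest() and getTestOpName(). A minimal hypothetical subclass, just to illustrate the contract (the concrete drivers that follow add type dispatch and result verification):

    // Hypothetical subclass for illustration; InsertTest, DeleteTest, UpdateTest and
    // BulkLoadTest below are the real implementations.
    @SuppressWarnings("rawtypes")
    public class NoOpTest extends BTreeTestDriver {
        @Override
        protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType,
                ITupleReference lowKey, ITupleReference highKey,
                ITupleReference prefixLowKey, ITupleReference prefixHighKey) throws Exception {
            // Build a context for the requested schema and leaf frame type, then close it again.
            BTreeTestContext testCtx = BTreeTestUtils.createBTreeTestContext(bufferCache, btreeFileId,
                    fieldSerdes, numKeys, leafType);
            testCtx.btree.close();
        }

        @Override
        protected String getTestOpName() {
            return "NoOp";
        }
    }
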
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BulkLoadTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BulkLoadTest.java
new file mode 100644
index 0000000..49d08a3
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BulkLoadTest.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestUtils;
+
+@SuppressWarnings("rawtypes")
+public class BulkLoadTest extends BTreeTestDriver {
+    
+    @Override
+    protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType, ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey) throws Exception {
+        BTreeTestContext testCtx = BTreeTestUtils.createBTreeTestContext(bufferCache, btreeFileId, fieldSerdes, numKeys, leafType);
+
+        // We assume all fieldSerdes are of the same type. Check the first one to determine which field types to generate.
+        if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
+            BTreeTestUtils.bulkLoadIntTuples(testCtx, numTuplesToInsert, rnd);
+        } else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
+            BTreeTestUtils.bulkLoadStringTuples(testCtx, numTuplesToInsert, rnd);
+        }
+
+        BTreeTestUtils.checkPointSearches(testCtx);
+        BTreeTestUtils.checkOrderedScan(testCtx);
+        BTreeTestUtils.checkDiskOrderScan(testCtx);
+        BTreeTestUtils.checkRangeSearch(testCtx, lowKey, highKey, true, true);
+        if (prefixLowKey != null && prefixHighKey != null) {
+            BTreeTestUtils.checkRangeSearch(testCtx, prefixLowKey, prefixHighKey, true, true);
+        }
+    }
+
+    @Override
+    protected String getTestOpName() {
+        return "BulkLoad";
+    }
+}
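
BulkLoadTest delegates the loading itself to BTreeTestUtils.bulkLoadIntTuples/bulkLoadStringTuples, which presumably drive the revised bulk-load API shown in the hunks near the top of this diff. Used directly, that API looks roughly as follows (a sketch; btree and a collection of pre-sorted tuples are assumed to exist):

    // Sketch of the revised bulk-load calls: fill factor first, then tuple-then-context adds.
    IIndexBulkLoadContext bulkLoadCtx = btree.beginBulkLoad(0.7f);
    for (ITupleReference sortedTuple : sortedTuples) {
        btree.bulkLoadAddTuple(sortedTuple, bulkLoadCtx);
    }
    btree.endBulkLoad(bulkLoadCtx);
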
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/DeleteTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/DeleteTest.java
new file mode 100644
index 0000000..2157adb
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/DeleteTest.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestUtils;
+
+@SuppressWarnings("rawtypes")
+public class DeleteTest extends BTreeTestDriver {
+    
+    private static final int numInsertRounds = 3;
+    private static final int numDeleteRounds = 3;
+    
+    @Override
+    protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType, ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey) throws Exception {
+        BTreeTestContext testCtx = BTreeTestUtils.createBTreeTestContext(bufferCache, btreeFileId, fieldSerdes, numKeys, leafType);
+        for (int i = 0; i < numInsertRounds; i++) {
+            
+            // We assume all fieldSerdes are of the same type. Check the first one to determine which field types to generate.
+            if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
+                BTreeTestUtils.insertIntTuples(testCtx, numTuplesToInsert, rnd);
+            } else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
+                BTreeTestUtils.insertStringTuples(testCtx, numTuplesToInsert, rnd);
+            }
+            
+            int numTuplesPerDeleteRound = (int)Math.ceil((float)testCtx.checkTuples.size() / (float)numDeleteRounds);
+            for(int j = 0; j < numDeleteRounds; j++) {
+                BTreeTestUtils.deleteTuples(testCtx, numTuplesPerDeleteRound, rnd);
+                
+                BTreeTestUtils.checkPointSearches(testCtx);
+                BTreeTestUtils.checkOrderedScan(testCtx);
+                BTreeTestUtils.checkDiskOrderScan(testCtx);
+                BTreeTestUtils.checkRangeSearch(testCtx, lowKey, highKey, true, true);
+                if (prefixLowKey != null && prefixHighKey != null) {
+                    BTreeTestUtils.checkRangeSearch(testCtx, prefixLowKey, prefixHighKey, true, true);
+                }
+            }
+        }
+    }
+
+    @Override
+    protected String getTestOpName() {
+        return "Delete";
+    }
+}
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/InsertTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/InsertTest.java
new file mode 100644
index 0000000..fa3f895
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/InsertTest.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestUtils;
+
+/**
+ * Tests the BTree insert operation with string and integer fields using
+ * various numbers of key and payload fields.
+ * 
+ * Each test first fills a BTree with randomly generated tuples.
+ * We compare the following operations against expected results:
+ * 1. Point searches for all tuples.
+ * 2. Ordered scan.
+ * 3. Disk-order scan.
+ * 4. Range search (and prefix search for composite keys).
+ * 
+ */
+@SuppressWarnings("rawtypes")
+public class InsertTest extends BTreeTestDriver {        
+    @Override
+    protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType, ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey) throws Exception {
+        BTreeTestContext testCtx = BTreeTestUtils.createBTreeTestContext(bufferCache, btreeFileId, fieldSerdes, numKeys, leafType);
+        // We assume all fieldSerdes are of the same type. Check the first one to determine which field types to generate.
+        if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
+            BTreeTestUtils.insertIntTuples(testCtx, numTuplesToInsert, rnd);
+        } else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
+            BTreeTestUtils.insertStringTuples(testCtx, numTuplesToInsert, rnd);
+        }
+        
+        BTreeTestUtils.checkPointSearches(testCtx);
+        BTreeTestUtils.checkOrderedScan(testCtx);
+        BTreeTestUtils.checkDiskOrderScan(testCtx);
+                
+        BTreeTestUtils.checkRangeSearch(testCtx, lowKey, highKey, true, true);
+        if (prefixLowKey != null && prefixHighKey != null) {
+            BTreeTestUtils.checkRangeSearch(testCtx, prefixLowKey, prefixHighKey, true, true);
+        }
+        testCtx.btree.close();
+    }
+
+    @Override
+    protected String getTestOpName() {
+        return "Insert";
+    }
+}
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/RangeSearchCursorTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/RangeSearchCursorTest.java
index 7dada06..14246f7 100644
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/RangeSearchCursorTest.java
+++ b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/RangeSearchCursorTest.java
@@ -18,9 +18,6 @@
 import java.io.ByteArrayInputStream;
 import java.io.DataInput;
 import java.io.DataInputStream;
-import java.io.DataOutput;
-import java.io.File;
-import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Random;
@@ -29,32 +26,28 @@
 import org.junit.Before;
 import org.junit.Test;
 
-import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
 import edu.uci.ics.hyracks.api.dataflow.value.TypeTrait;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
 import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
 import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
 import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeException;
 import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
 import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
 import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeException;
 import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
 import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.btree.util.AbstractBTreeTest;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
 import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
@@ -65,89 +58,55 @@
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
 import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestUtils;
 
 public class RangeSearchCursorTest extends AbstractBTreeTest {
-	private static final int PAGE_SIZE = 256;
-	private static final int NUM_PAGES = 10;
-	private static final int MAX_OPEN_FILES = 10;
-	private static final int HYRACKS_FRAME_SIZE = 128;	
-
-	// declare fields
+	// Declare fields
 	int fieldCount = 2;
 	ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
 
 	TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(
 			typeTraits);
-	ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(
-			tupleWriterFactory);
-	ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(
-			tupleWriterFactory);
 	ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
-
-	IBTreeLeafFrame leafFrame = (IBTreeLeafFrame)leafFrameFactory.createFrame();
-	IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame)interiorFrameFactory.createFrame();
 	ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
 
-    IHyracksTaskContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
-	ByteBuffer frame = ctx.allocateFrame();
-	FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-
-	ISerializerDeserializer[] recDescSers = {
-			IntegerSerializerDeserializer.INSTANCE,
-			IntegerSerializerDeserializer.INSTANCE };
-	RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
-	IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(),
-			recDesc);
-	FrameTupleReference tuple = new FrameTupleReference();
-
 	Random rnd = new Random(50);
 
 	@Before
-	public void setUp() {
-		typeTraits[0] = new TypeTrait(4);
+	public void setUp() throws HyracksDataException {
+		super.setUp();
+	    typeTraits[0] = new TypeTrait(4);
 		typeTraits[1] = new TypeTrait(4);
-		accessor.reset(frame);
 	}
 
 	@Test
 	public void uniqueIndexTest() throws Exception {
-
 	    LOGGER.info("TESTING RANGE SEARCH CURSOR ON UNIQUE INDEX");
 
-		TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
-		IBufferCache bufferCache = TestStorageManagerComponentHolder
-				.getBufferCache(ctx);
-		IFileMapProvider fmp = TestStorageManagerComponentHolder
-				.getFileMapProvider(ctx);
-		FileReference file = new FileReference(new File(fileName));
-		bufferCache.createFile(file);
-		int fileId = fmp.lookupFileId(file);
-		bufferCache.openFile(fileId);
-
 		// declare keys
 		int keyFieldCount = 1;
-		IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
-		cmps[0] = IntegerBinaryComparatorFactory.INSTANCE
-				.createBinaryComparator();
+		IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+		cmpFactories[0] = IntegerBinaryComparatorFactory.INSTANCE;
 
-		MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+		MultiComparator cmp = BTreeUtils.createMultiComparator(cmpFactories);
 
-		IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
+		ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(
+	            tupleWriterFactory);
+	    ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(
+	            tupleWriterFactory);
 		
-		BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory,
-				leafFrameFactory, cmp);
-		btree.create(fileId, leafFrame, metaFrame);
-		btree.open(fileId);
+	    IBTreeLeafFrame leafFrame = (IBTreeLeafFrame)leafFrameFactory.createFrame();
+	    IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame)interiorFrameFactory.createFrame();
+	    
+		IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, btreeFileId, 0, metaFrameFactory);
+		
+		BTree btree = new BTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
+		btree.create(btreeFileId);
+		btree.open(btreeFileId);
 
-		ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
-		DataOutput dos = tb.getDataOutput();
+		ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
+	    ArrayTupleReference tuple = new ArrayTupleReference();
 
-		BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT,
-				leafFrame, interiorFrame, metaFrame);
+		BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT);
 
 		// generate keys
 		int numKeys = 50;
@@ -165,18 +124,8 @@
 		// insert keys into btree
 		for (int i = 0; i < keys.size(); i++) {
 
-			tb.reset();
-			IntegerSerializerDeserializer.INSTANCE.serialize(keys.get(i)
-					.intValue(), dos);
-			tb.addFieldEndOffset();
-			IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
-			tb.addFieldEndOffset();
-
-			appender.reset(frame, true);
-			appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb
-					.getSize());
-
-			tuple.reset(accessor, 0);
+		    TupleUtils.createIntegerTuple(tupleBuilder, tuple, keys.get(i), i);		    
+			tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
 
 			try {
 				btree.insert(tuple, insertOpCtx);
@@ -212,47 +161,38 @@
 				maxSearchKey, false, true, true, false);
 
 		btree.close();
-		bufferCache.closeFile(fileId);
-		bufferCache.close();
 	}
 
 	@Test
 	public void nonUniqueIndexTest() throws Exception {
-
 	    LOGGER.info("TESTING RANGE SEARCH CURSOR ON NONUNIQUE INDEX");
 
-		TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
-		IBufferCache bufferCache = TestStorageManagerComponentHolder
-				.getBufferCache(ctx);
-		IFileMapProvider fmp = TestStorageManagerComponentHolder
-				.getFileMapProvider(ctx);
-		FileReference file = new FileReference(new File(fileName));
-		bufferCache.createFile(file);
-		int fileId = fmp.lookupFileId(file);
-		bufferCache.openFile(fileId);
-
 		// declare keys
 		int keyFieldCount = 2;
-		IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
-		cmps[0] = IntegerBinaryComparatorFactory.INSTANCE
-				.createBinaryComparator();
-		cmps[1] = IntegerBinaryComparatorFactory.INSTANCE
-				.createBinaryComparator();
+		IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+		cmpFactories[0] = IntegerBinaryComparatorFactory.INSTANCE;
+		cmpFactories[1] = IntegerBinaryComparatorFactory.INSTANCE;
 
-		MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+		MultiComparator cmp = BTreeUtils.createMultiComparator(cmpFactories);
 
-		IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
+        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(
+                tupleWriterFactory);
+        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(
+                tupleWriterFactory);
+
+        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame)leafFrameFactory.createFrame();
+        IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame)interiorFrameFactory.createFrame();
+        
+		IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, btreeFileId, 0, metaFrameFactory);
 		
-		BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory,
-				leafFrameFactory, cmp);
-		btree.create(fileId, leafFrame, metaFrame);
-		btree.open(fileId);
+		BTree btree = new BTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
+		btree.create(btreeFileId);
+		btree.open(btreeFileId);
 
-		ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
-		DataOutput dos = tb.getDataOutput();
+		ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
 
-		BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT,
-				leafFrame, interiorFrame, metaFrame);
+		BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT);
 
 		// generate keys
 		int numKeys = 50;
@@ -267,18 +207,8 @@
 		// insert keys into btree
 		for (int i = 0; i < keys.size(); i++) {
 
-			tb.reset();
-			IntegerSerializerDeserializer.INSTANCE.serialize(keys.get(i)
-					.intValue(), dos);
-			tb.addFieldEndOffset();
-			IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
-			tb.addFieldEndOffset();
-
-			appender.reset(frame, true);
-			appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb
-					.getSize());
-
-			tuple.reset(accessor, 0);
+		    TupleUtils.createIntegerTuple(tupleBuilder, tuple, keys.get(i), i);          
+            tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
 
 			try {
 				btree.insert(tuple, insertOpCtx);
@@ -314,51 +244,38 @@
 				maxSearchKey, false, true, true, false);
 
 		btree.close();
-		bufferCache.closeFile(fileId);
-		bufferCache.close();
 	}
 
 	@Test
 	public void nonUniqueFieldPrefixIndexTest() throws Exception {
-
 	    LOGGER.info("TESTING RANGE SEARCH CURSOR ON NONUNIQUE FIELD-PREFIX COMPRESSED INDEX");
 
-		ITreeIndexFrameFactory leafFrameFactory = new BTreeFieldPrefixNSMLeafFrameFactory(
-				tupleWriterFactory);
-		IBTreeLeafFrame leafFrame = (IBTreeLeafFrame)leafFrameFactory.createFrame();
-
-		TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
-		IBufferCache bufferCache = TestStorageManagerComponentHolder
-				.getBufferCache(ctx);
-		IFileMapProvider fmp = TestStorageManagerComponentHolder
-				.getFileMapProvider(ctx);
-		FileReference file = new FileReference(new File(fileName));
-		bufferCache.createFile(file);
-		int fileId = fmp.lookupFileId(file);
-		bufferCache.openFile(fileId);
-
 		// declare keys
 		int keyFieldCount = 2;
-		IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
-		cmps[0] = IntegerBinaryComparatorFactory.INSTANCE
-				.createBinaryComparator();
-		cmps[1] = IntegerBinaryComparatorFactory.INSTANCE
-				.createBinaryComparator();
+		IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+		cmpFactories[0] = IntegerBinaryComparatorFactory.INSTANCE;
+		cmpFactories[1] = IntegerBinaryComparatorFactory.INSTANCE;
 
-		MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+		MultiComparator cmp = BTreeUtils.createMultiComparator(cmpFactories);
 
-		IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);		
+        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(
+                tupleWriterFactory);
+        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(
+                tupleWriterFactory);
+
+        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame)leafFrameFactory.createFrame();
+        IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame)interiorFrameFactory.createFrame();
+
+		IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, btreeFileId, 0, metaFrameFactory);		
 		
-		BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory,
-				leafFrameFactory, cmp);
-		btree.create(fileId, leafFrame, metaFrame);
-		btree.open(fileId);
+		BTree btree = new BTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
+		btree.create(btreeFileId);
+		btree.open(btreeFileId);
 
-		ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
-		DataOutput dos = tb.getDataOutput();
+		ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
 
-		BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT,
-				leafFrame, interiorFrame, metaFrame);
+		BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT);
 
 		// generate keys
 		int numKeys = 50;
@@ -373,18 +290,8 @@
 		// insert keys into btree
 		for (int i = 0; i < keys.size(); i++) {
 
-			tb.reset();
-			IntegerSerializerDeserializer.INSTANCE.serialize(keys.get(i)
-					.intValue(), dos);
-			tb.addFieldEndOffset();
-			IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
-			tb.addFieldEndOffset();
-
-			appender.reset(frame, true);
-			appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb
-					.getSize());
-
-			tuple.reset(accessor, 0);
+		    TupleUtils.createIntegerTuple(tupleBuilder, tuple, keys.get(i), i);          
+            tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
 
 			try {
 				btree.insert(tuple, insertOpCtx);
@@ -420,51 +327,20 @@
 				maxSearchKey, false, true, true, false);
 
 		btree.close();
-		bufferCache.closeFile(fileId);
-		bufferCache.close();
 	}
 
 	public RangePredicate createRangePredicate(int lk, int hk,
 			boolean isForward, boolean lowKeyInclusive,
-			boolean highKeyInclusive, MultiComparator cmp,
-			ITypeTrait[] typeTraits) throws HyracksDataException {
-		// build low and high keys
-		ArrayTupleBuilder ktb = new ArrayTupleBuilder(1);
-		DataOutput kdos = ktb.getDataOutput();
-
-		ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE };
-		RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
-		IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx
-				.getFrameSize(), keyDesc);
-		keyAccessor.reset(frame);
-
-		appender.reset(frame, true);
-
-		// build and append low key
-		ktb.reset();
-		IntegerSerializerDeserializer.INSTANCE.serialize(lk, kdos);
-		ktb.addFieldEndOffset();
-		appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb
-				.getSize());
-
-		// build and append high key
-		ktb.reset();
-		IntegerSerializerDeserializer.INSTANCE.serialize(hk, kdos);
-		ktb.addFieldEndOffset();
-		appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb
-				.getSize());
+			boolean highKeyInclusive, MultiComparator cmp) throws HyracksDataException {
 
 		// create tuplereferences for search keys
-		FrameTupleReference lowKey = new FrameTupleReference();
-		lowKey.reset(keyAccessor, 0);
-
-		FrameTupleReference highKey = new FrameTupleReference();
-		highKey.reset(keyAccessor, 1);
+		ITupleReference lowKey = TupleUtils.createIntegerTuple(lk);
+		ITupleReference highKey = TupleUtils.createIntegerTuple(hk);
 
 		IBinaryComparator[] searchCmps = new IBinaryComparator[1];
 		searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE
 				.createBinaryComparator();
-		MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
+		MultiComparator searchCmp = new MultiComparator(searchCmps);
 
 		RangePredicate rangePred = new RangePredicate(isForward, lowKey,
 				highKey, lowKeyInclusive, highKeyInclusive, searchCmp,
@@ -532,10 +408,8 @@
 				ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor(leafFrame);
 				RangePredicate rangePred = createRangePredicate(lowKey,
 						highKey, isForward, lowKeyInclusive, highKeyInclusive,
-						btree.getMultiComparator(), btree.getMultiComparator()
-								.getTypeTraits());
-				BTreeOpContext searchOpCtx = btree.createOpContext(
-						IndexOp.SEARCH, leafFrame, interiorFrame, null);
+						btree.getMultiComparator());
+				BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH);
 				btree.search(rangeCursor, rangePred, searchOpCtx);
 
 				try {
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StorageManagerTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StorageManagerTest.java
index 39bedac..7e4b2e3 100644
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StorageManagerTest.java
+++ b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StorageManagerTest.java
@@ -22,23 +22,21 @@
 
 import org.junit.Test;
 
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.storage.am.btree.util.AbstractBTreeTest;
 import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
 import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
 import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
 import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
 import edu.uci.ics.hyracks.storage.common.sync.LatchType;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestUtils;
 
 public class StorageManagerTest extends AbstractBTreeTest {
     private static final int PAGE_SIZE = 256;
     private static final int NUM_PAGES = 10;
     private static final int MAX_OPEN_FILES = 10;
-    private static final int HYRACKS_FRAME_SIZE = 128;
-    private IHyracksTaskContext ctx = TestUtils.create(32768);
+    private static final int HYRACKS_FRAME_SIZE = 32768;
 
     public class PinnedLatchedPage {
         public final ICachedPage page;
@@ -255,4 +253,20 @@
 
         bufferCache.close();
     }
+    
+    public int getPageSize() {
+        return PAGE_SIZE;
+    }
+    
+    public int getNumPages() {
+        return NUM_PAGES;
+    }
+    
+    public int getHyracksFrameSize() {
+        return HYRACKS_FRAME_SIZE;
+    }
+    
+    public int getMaxOpenFiles() {
+        return MAX_OPEN_FILES;
+    }
 }
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/UpdateTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/UpdateTest.java
new file mode 100644
index 0000000..b40539f
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/UpdateTest.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestUtils;
+
+@SuppressWarnings("rawtypes")
+public class UpdateTest extends BTreeTestDriver {
+    private static final int numUpdateRounds = 3;
+    
+    @Override
+    protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType, ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey) throws Exception {
+        // This is a noop because we can only update non-key fields.
+        if (fieldSerdes.length == numKeys) {
+            return;
+        }
+        
+        BTreeTestContext testCtx = BTreeTestUtils.createBTreeTestContext(bufferCache, btreeFileId, fieldSerdes, numKeys, leafType);
+
+        // We assume all fieldSerdes are of the same type. Check the first one to determine which field types to generate.
+        if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
+            BTreeTestUtils.insertIntTuples(testCtx, numTuplesToInsert, rnd);
+        } else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
+            BTreeTestUtils.insertStringTuples(testCtx, numTuplesToInsert, rnd);
+        }
+
+        int numTuplesPerUpdateRound = (int)Math.ceil((float)testCtx.checkTuples.size() / (float)numUpdateRounds);
+        for(int j = 0; j < numUpdateRounds; j++) {
+            BTreeTestUtils.updateTuples(testCtx, numTuplesPerUpdateRound, rnd);
+
+            BTreeTestUtils.checkPointSearches(testCtx);
+            BTreeTestUtils.checkOrderedScan(testCtx);
+            BTreeTestUtils.checkDiskOrderScan(testCtx);
+            BTreeTestUtils.checkRangeSearch(testCtx, lowKey, highKey, true, true);
+            if (prefixLowKey != null && prefixHighKey != null) {
+                BTreeTestUtils.checkRangeSearch(testCtx, prefixLowKey, prefixHighKey, true, true);
+            }
+        }
+    }
+
+    @Override
+    protected String getTestOpName() {
+        return "Update";
+    }
+}
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/AbstractBTreeTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/AbstractBTreeTest.java
new file mode 100644
index 0000000..dff1e9c
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/AbstractBTreeTest.java
@@ -0,0 +1,76 @@
+package edu.uci.ics.hyracks.storage.am.btree.util;
+
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.Random;
+import java.util.logging.Logger;
+
+import org.junit.After;
+import org.junit.Before;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestUtils;
+
+public abstract class AbstractBTreeTest {
+    protected static final Logger LOGGER = Logger.getLogger(AbstractBTreeTest.class.getName());
+    public static final long RANDOM_SEED = 50;
+    
+    private static final int PAGE_SIZE = 256;
+    private static final int NUM_PAGES = 10;
+    private static final int MAX_OPEN_FILES = 10;
+    private static final int HYRACKS_FRAME_SIZE = 128;
+        
+    protected IHyracksTaskContext ctx; 
+    protected IBufferCache bufferCache;
+    protected int btreeFileId;
+    
+    protected final Random rnd = new Random();
+    protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
+    protected final static String tmpDir = System.getProperty("java.io.tmpdir");
+    protected final static String sep = System.getProperty("file.separator");
+    protected String fileName;    
+    
+    @Before
+    public void setUp() throws HyracksDataException {
+        fileName = tmpDir + sep + simpleDateFormat.format(new Date());
+        ctx = TestUtils.create(getHyracksFrameSize());
+        TestStorageManagerComponentHolder.init(getPageSize(), getNumPages(), getMaxOpenFiles());
+        bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+        FileReference file = new FileReference(new File(fileName));
+        bufferCache.createFile(file);
+        btreeFileId = fmp.lookupFileId(file);
+        bufferCache.openFile(btreeFileId);
+        rnd.setSeed(RANDOM_SEED);
+    }
+    
+    @After
+    public void tearDown() throws HyracksDataException {
+        bufferCache.closeFile(btreeFileId);
+        bufferCache.close();
+        File f = new File(fileName);
+        f.deleteOnExit();
+    }
+    
+    public int getPageSize() {
+        return PAGE_SIZE;
+    }
+    
+    public int getNumPages() {
+        return NUM_PAGES;
+    }
+    
+    public int getHyracksFrameSize() {
+        return HYRACKS_FRAME_SIZE;
+    }
+    
+    public int getMaxOpenFiles() {
+        return MAX_OPEN_FILES;
+    }
+}
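
AbstractBTreeTest centralizes buffer-cache setup and teardown; the getters at the bottom exist so individual tests can tune the fixture, which is what StorageManagerTest above does for the frame size. A hypothetical subclass overriding two of the parameters:

    // Hypothetical subclass; the overridden values feed into TestUtils.create() and
    // TestStorageManagerComponentHolder.init() in setUp().
    public class LargePageBTreeTest extends AbstractBTreeTest {
        @Override
        public int getPageSize() {
            return 1024;
        }

        @Override
        public int getHyracksFrameSize() {
            return 32768;
        }
    }
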
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestContext.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestContext.java
new file mode 100644
index 0000000..c56f1b6
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestContext.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.util;
+
+import java.util.TreeSet;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+
+@SuppressWarnings("rawtypes")
+public final class BTreeTestContext {    
+    public final ISerializerDeserializer[] fieldSerdes;
+    public final IBufferCache bufferCache;
+    public final BTree btree;
+    public final IBTreeLeafFrame leafFrame;
+    public final IBTreeInteriorFrame interiorFrame;
+    public final ITreeIndexMetaDataFrame metaFrame;    
+    public final ArrayTupleBuilder tupleBuilder;
+    public final ArrayTupleReference tuple = new ArrayTupleReference();
+    public final TreeSet<CheckTuple> checkTuples = new TreeSet<CheckTuple>();
+    public final BTreeOpContext opCtx;
+    
+    public BTreeTestContext(IBufferCache bufferCache, ISerializerDeserializer[] fieldSerdes, BTree btree, IBTreeLeafFrame leafFrame,
+            IBTreeInteriorFrame interiorFrame, ITreeIndexMetaDataFrame metaFrame, BTreeOpContext opCtx) {
+        this.bufferCache = bufferCache;
+        this.fieldSerdes = fieldSerdes;
+        this.btree = btree;
+        this.leafFrame = leafFrame;
+        this.interiorFrame = interiorFrame;
+        this.metaFrame = metaFrame;
+        this.opCtx = opCtx;
+        this.tupleBuilder = new ArrayTupleBuilder(fieldSerdes.length);
+    }
+    
+    public int getFieldCount() {
+        return fieldSerdes.length;
+    }
+    
+    public int getKeyFieldCount() {
+        return btree.getMultiComparator().getKeyFieldCount();
+    }
+}
\ No newline at end of file
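
BTreeTestContext bundles the pieces a check needs: the serdes describing the schema, the frames, the op context, and the checkTuples oracle that the verification routines compare against. The factory in BTreeTestUtils (next file) is the intended entry point; a brief usage sketch in the style of the drivers above (bufferCache, btreeFileId, and rnd come from AbstractBTreeTest):

    // Two int fields, the first one a key; insert some tuples and verify an ordered scan.
    ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
            IntegerSerializerDeserializer.INSTANCE };
    BTreeTestContext testCtx = BTreeTestUtils.createBTreeTestContext(bufferCache, btreeFileId,
            fieldSerdes, 1, BTreeLeafFrameType.REGULAR_NSM);
    BTreeTestUtils.insertIntTuples(testCtx, 1000, rnd);
    BTreeTestUtils.checkOrderedScan(testCtx);
    testCtx.btree.close();
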
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestUtils.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestUtils.java
new file mode 100644
index 0000000..8e7a43b
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestUtils.java
@@ -0,0 +1,499 @@
+package edu.uci.ics.hyracks.storage.am.btree.util;
+
+import static org.junit.Assert.fail;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.util.Iterator;
+import java.util.NavigableSet;
+import java.util.Random;
+import java.util.TreeSet;
+import java.util.logging.Logger;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.SerdeUtils;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeDuplicateKeyException;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+
+@SuppressWarnings("rawtypes")
+public class BTreeTestUtils {
+    private static final Logger LOGGER = Logger.getLogger(BTreeTestUtils.class.getName());    
+    
+    public static BTreeTestContext createBTreeTestContext(IBufferCache bufferCache, int btreeFileId, ISerializerDeserializer[] fieldSerdes, int numKeyFields, BTreeLeafFrameType leafType) throws Exception {        
+        ITypeTrait[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes, fieldSerdes.length);
+        IBinaryComparator[] cmps = SerdeUtils.serdesToComparators(fieldSerdes, numKeyFields);
+        
+        BTree btree = BTreeUtils.createBTree(bufferCache, btreeFileId, typeTraits, cmps, leafType);
+        btree.create(btreeFileId);
+        btree.open(btreeFileId);
+        // Set an arbitrary index op in the context. Will be reset anyway.
+        BTreeOpContext opCtx = btree.createOpContext(IndexOp.SEARCH);
+        
+        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) btree.getLeafFrameFactory().createFrame();
+        IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) btree.getInteriorFrameFactory().createFrame();
+        ITreeIndexMetaDataFrame metaFrame = btree.getFreePageManager().getMetaDataFrameFactory().createFrame();
+        BTreeTestContext testCtx = new BTreeTestContext(bufferCache, fieldSerdes, btree, leafFrame, interiorFrame, metaFrame, opCtx);
+        return testCtx;
+    }
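+    // Minimal usage sketch (hypothetical local names; assumes the calling test has already set up
+    // "bufferCache", "btreeFileId", "fieldSerdes", "numKeys", "leafType", and a Random "rnd"):
+    //   BTreeTestContext ctx = createBTreeTestContext(bufferCache, btreeFileId, fieldSerdes, numKeys, leafType);
+    //   insertIntTuples(ctx, 10000, rnd);
+    //   checkOrderedScan(ctx);
+    //   checkPointSearches(ctx);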
+    
+    private static void compareActualAndExpected(ITupleReference actual, CheckTuple expected, ISerializerDeserializer[] fieldSerdes) throws HyracksDataException {
+        for (int i = 0; i < fieldSerdes.length; i++) {
+            ByteArrayInputStream inStream = new ByteArrayInputStream(
+                    actual.getFieldData(i), actual.getFieldStart(i),
+                    actual.getFieldLength(i));
+            DataInput dataIn = new DataInputStream(inStream);
+            Object actualObj = fieldSerdes[i].deserialize(dataIn);            
+            if (!actualObj.equals(expected.get(i))) {
+                fail("Actual and expected fields do not match.\nExpected: " + expected.get(i) + "\nActual  : " + actualObj);
+            }
+        }
+    }
+    
+    @SuppressWarnings("unchecked")
+    private static CheckTuple createCheckTupleFromTuple(ITupleReference tuple, ISerializerDeserializer[] fieldSerdes, int numKeys) throws HyracksDataException {
+        CheckTuple checkTuple = new CheckTuple(fieldSerdes.length, numKeys);
+        int numFields = Math.min(fieldSerdes.length, tuple.getFieldCount());
+        for (int i = 0; i < numFields; i++) {
+            ByteArrayInputStream inStream = new ByteArrayInputStream(
+                    tuple.getFieldData(i), tuple.getFieldStart(i),
+                    tuple.getFieldLength(i));
+            DataInput dataIn = new DataInputStream(inStream);
+            Comparable fieldObj = (Comparable)fieldSerdes[i].deserialize(dataIn);
+            checkTuple.add(fieldObj);
+        }
+        return checkTuple;
+    }
+    
+    @SuppressWarnings("unchecked")
+    private static void createTupleFromCheckTuple(CheckTuple checkTuple, ArrayTupleBuilder tupleBuilder, ArrayTupleReference tuple, ISerializerDeserializer[] fieldSerdes) throws HyracksDataException {
+        int fieldCount = tupleBuilder.getFieldEndOffsets().length; 
+        DataOutput dos = tupleBuilder.getDataOutput();
+        tupleBuilder.reset();
+        for (int i = 0; i < fieldCount; i++) {
+            fieldSerdes[i].serialize(checkTuple.get(i), dos);
+            tupleBuilder.addFieldEndOffset();
+        }
+        tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
+    }
+    
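+    // Scans the entire BTree in key order and checks that it returns exactly the expected
+    // tuples, in the same order as the sorted checkTuples set.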
+    public static void checkOrderedScan(BTreeTestContext testCtx) throws Exception {
+        LOGGER.info("Testing Ordered Scan.");
+        ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(testCtx.leafFrame);
+        RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+        testCtx.opCtx.reset(IndexOp.SEARCH);
+        testCtx.btree.search(scanCursor, nullPred, testCtx.opCtx);
+        Iterator<CheckTuple> checkIter = testCtx.checkTuples.iterator();
+        int actualCount = 0;
+        try {
+            while (scanCursor.hasNext()) {
+                if (!checkIter.hasNext()) {
+                    fail("Ordered scan returned more answers than expected.\nExpected: " + testCtx.checkTuples.size());
+                }
+                scanCursor.next();
+                CheckTuple expectedTuple = checkIter.next();
+                ITupleReference tuple = scanCursor.getTuple();
+                compareActualAndExpected(tuple, expectedTuple, testCtx.fieldSerdes);
+                actualCount++;
+            }
+            if (actualCount < testCtx.checkTuples.size()) {
+                fail("Ordered scan returned fewer answers than expected.\nExpected: " + testCtx.checkTuples.size() + "\nActual  : " + actualCount);
+            }
+        } finally {
+            scanCursor.close();
+        }
+    }
+    
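+    // Scans the BTree in physical (disk) page order; only membership and cardinality are
+    // checked, since disk order is not key order.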
+    public static void checkDiskOrderScan(BTreeTestContext testCtx) throws Exception {
+        LOGGER.info("Testing Disk-Order Scan.");
+        ITreeIndexCursor diskOrderCursor = new TreeDiskOrderScanCursor(testCtx.leafFrame);
+        testCtx.opCtx.reset(IndexOp.DISKORDERSCAN);
+        testCtx.btree.diskOrderScan(diskOrderCursor, testCtx.opCtx);
+        int actualCount = 0;        
+        try {
+            while (diskOrderCursor.hasNext()) {
+                diskOrderCursor.next();
+                ITupleReference tuple = diskOrderCursor.getTuple();
+                CheckTuple checkTuple = createCheckTupleFromTuple(tuple, testCtx.fieldSerdes, testCtx.btree.getMultiComparator().getKeyFieldCount());
+                if (!testCtx.checkTuples.contains(checkTuple)) {
+                    fail("Disk-order scan returned unexpected answer: " + checkTuple.toString());
+                }
+                actualCount++;
+            }
+            if (actualCount < testCtx.checkTuples.size()) {
+                fail("Disk-order scan returned fewer answers than expected.\nExpected: " + testCtx.checkTuples.size() + "\nActual  : " + actualCount);
+            }
+            if (actualCount > testCtx.checkTuples.size()) {
+                fail("Disk-order scan returned more answers than expected.\nExpected: " + testCtx.checkTuples.size() + "\nActual  : " + actualCount);
+            }
+        } finally {
+            diskOrderCursor.close();
+        }
+    }
+    
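+    // Runs a range search between lowKey and highKey (either may be a key prefix) and compares
+    // the results against the corresponding subset of the expected tuples.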
+    public static void checkRangeSearch(BTreeTestContext testCtx, ITupleReference lowKey, ITupleReference highKey, boolean lowKeyInclusive, boolean highKeyInclusive) throws Exception {
+        LOGGER.info("Testing Range Search.");
+        MultiComparator lowKeyCmp = BTreeUtils.getSearchMultiComparator(testCtx.btree.getMultiComparator(), lowKey);
+        MultiComparator highKeyCmp = BTreeUtils.getSearchMultiComparator(testCtx.btree.getMultiComparator(), highKey);
+        ITreeIndexCursor searchCursor = new BTreeRangeSearchCursor(testCtx.leafFrame);
+        RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, lowKeyInclusive, highKeyInclusive, lowKeyCmp, highKeyCmp);
+        testCtx.opCtx.reset(IndexOp.SEARCH);
+        testCtx.btree.search(searchCursor, rangePred, testCtx.opCtx);
+        // Get the subset of elements from the expected set within given key range.
+        CheckTuple lowKeyCheck = createCheckTupleFromTuple(lowKey, testCtx.fieldSerdes, lowKeyCmp.getKeyFieldCount());
+        CheckTuple highKeyCheck = createCheckTupleFromTuple(highKey, testCtx.fieldSerdes, highKeyCmp.getKeyFieldCount());
+        NavigableSet<CheckTuple> expectedSubset = null;
+        if (lowKeyCmp.getKeyFieldCount() < testCtx.btree.getMultiComparator().getKeyFieldCount() || 
+                highKeyCmp.getKeyFieldCount() < testCtx.btree.getMultiComparator().getKeyFieldCount()) {
+            // Searching on a key prefix (low key or high key or both).
+            expectedSubset = getPrefixExpectedSubset(testCtx.checkTuples, lowKeyCheck, highKeyCheck);
+        } else {
+            // Searching on all key fields.
+            expectedSubset = testCtx.checkTuples.subSet(lowKeyCheck, lowKeyInclusive, highKeyCheck, highKeyInclusive);
+        }
+        Iterator<CheckTuple> checkIter = expectedSubset.iterator();
+        int actualCount = 0;
+        try {
+            while (searchCursor.hasNext()) {
+                if (!checkIter.hasNext()) {
+                    fail("Range search returned more answers than expected.\nExpected: " + expectedSubset.size());
+                }
+                searchCursor.next();
+                CheckTuple expectedTuple = checkIter.next();
+                ITupleReference tuple = searchCursor.getTuple();
+                compareActualAndExpected(tuple, expectedTuple, testCtx.fieldSerdes);
+                actualCount++;
+            }
+            if (actualCount < expectedSubset.size()) {
+                fail("Range search returned fewer answers than expected.\nExpected: " + expectedSubset.size() + "\nActual  : " + actualCount);
+            }
+        } finally {
+            searchCursor.close();
+        }
+    }
+    
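+    // Performs a point (equality) search for every expected key and verifies that exactly one
+    // matching tuple is returned.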
+    public static void checkPointSearches(BTreeTestContext testCtx) throws Exception {
+        LOGGER.info("Testing Point Searches On All Expected Keys.");        
+        ITreeIndexCursor searchCursor = new BTreeRangeSearchCursor(testCtx.leafFrame);
+        
+        ArrayTupleBuilder lowKeyBuilder = new ArrayTupleBuilder(testCtx.btree.getMultiComparator().getKeyFieldCount());
+        ArrayTupleReference lowKey = new ArrayTupleReference();
+        ArrayTupleBuilder highKeyBuilder = new ArrayTupleBuilder(testCtx.btree.getMultiComparator().getKeyFieldCount());
+        ArrayTupleReference highKey = new ArrayTupleReference();
+        testCtx.opCtx.reset(IndexOp.SEARCH);
+        RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, null, null);
+
+        // Iterate through expected tuples, and perform a point search in the BTree to verify the tuple can be reached.
+        for (CheckTuple checkTuple : testCtx.checkTuples) {
+            createTupleFromCheckTuple(checkTuple, lowKeyBuilder, lowKey, testCtx.fieldSerdes);
+            createTupleFromCheckTuple(checkTuple, highKeyBuilder, highKey, testCtx.fieldSerdes);
+            MultiComparator lowKeyCmp = BTreeUtils.getSearchMultiComparator(testCtx.btree.getMultiComparator(), lowKey);
+            MultiComparator highKeyCmp = BTreeUtils.getSearchMultiComparator(testCtx.btree.getMultiComparator(), highKey);
+                        
+            rangePred.setLowKey(lowKey, true);
+            rangePred.setHighKey(highKey, true);
+            rangePred.setLowKeyComparator(lowKeyCmp);
+            rangePred.setHighKeyComparator(highKeyCmp);
+            
+            testCtx.btree.search(searchCursor, rangePred, testCtx.opCtx);
+            
+            try {
+                // We expect exactly one answer.
+                if (!searchCursor.hasNext()) {
+                    fail("Point search returned no answer for an expected key.");
+                }
+                searchCursor.next();
+                ITupleReference tuple = searchCursor.getTuple();
+                compareActualAndExpected(tuple, checkTuple, testCtx.fieldSerdes);
+                if (searchCursor.hasNext()) {
+                    fail("Point search returned more than one answer.");
+                }
+            } finally {
+                searchCursor.close();
+            }
+        }
+    }
+    
+    // Creates a new TreeSet containing the elements satisfying the prefix search.
+    // Implementing prefix search by changing compareTo() in CheckTuple does not work.
+    @SuppressWarnings("unchecked")
+    public static TreeSet<CheckTuple> getPrefixExpectedSubset(TreeSet<CheckTuple> checkTuples, CheckTuple lowKey, CheckTuple highKey) {
+        TreeSet<CheckTuple> expectedSubset = new TreeSet<CheckTuple>();
+        Iterator<CheckTuple> iter = checkTuples.iterator();
+        while(iter.hasNext()) {
+            CheckTuple t = iter.next();
+            boolean geLowKey = true;
+            boolean leHighKey = true;
+            for (int i = 0; i < lowKey.getNumKeys(); i++) {
+                if (t.get(i).compareTo(lowKey.get(i)) < 0) {
+                    geLowKey = false;
+                    break;
+                }
+            }
+            for (int i = 0; i < highKey.getNumKeys(); i++) {
+                if (t.get(i).compareTo(highKey.get(i)) > 0) {
+                    leHighKey = false;
+                    break;
+                }
+            }
+            if (geLowKey && leHighKey) {
+                expectedSubset.add(t);
+            }
+        }
+        return expectedSubset;
+    }
+    
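+    // Inserts numTuples random integer tuples; duplicate-key insertions are ignored so the
+    // expected set stays consistent with the tree.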
+    public static void insertIntTuples(BTreeTestContext testCtx, int numTuples, Random rnd) throws Exception {
+        int numFields = testCtx.getFieldCount();
+        int numKeyFields = testCtx.getKeyFieldCount();
+        
+        testCtx.opCtx.reset(IndexOp.INSERT);
+        
+        int[] tupleValues = new int[testCtx.getFieldCount()];
+        // Scale range of values according to number of keys. 
+        // For example, for 2 keys we want the square root of numTuples, for 3 keys the cube root of numTuples, etc.        
+        int maxValue = (int)Math.ceil(Math.pow(numTuples, 1.0/(double)numKeyFields));
+        for (int i = 0; i < numTuples; i++) {
+            // Set keys.
+            for (int j = 0; j < numKeyFields; j++) {
+                tupleValues[j] = rnd.nextInt() % maxValue;
+            }
+            // Set values.
+            for (int j = numKeyFields; j < numFields; j++) {
+                tupleValues[j] = j;
+            }
+            TupleUtils.createIntegerTuple(testCtx.tupleBuilder, testCtx.tuple, tupleValues);
+            if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
+                LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
+            }
+            try {
+                testCtx.btree.insert(testCtx.tuple, testCtx.opCtx);
+                // Set expected values. Do this only after insertion succeeds because we ignore duplicate keys.
+                CheckTuple<Integer> checkTuple = new CheckTuple<Integer>(numFields, numKeyFields);
+                for(int v : tupleValues) {
+                    checkTuple.add(v);
+                }
+                testCtx.checkTuples.add(checkTuple);
+            } catch (BTreeDuplicateKeyException e) {
+                // Ignore duplicate key insertions.
+            }
+        }
+    }
+    
+    public static void insertStringTuples(BTreeTestContext testCtx, int numTuples, Random rnd) throws Exception {
+        int numFields = testCtx.getFieldCount();
+        int numKeyFields = testCtx.getKeyFieldCount();
+        
+        testCtx.opCtx.reset(IndexOp.INSERT);
+
+        Object[] tupleValues = new Object[numFields];
+        for (int i = 0; i < numTuples; i++) {
+            if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
+                LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
+            }
+            // Set keys.
+            for (int j = 0; j < numKeyFields; j++) {
+                int length = (Math.abs(rnd.nextInt()) % 10) + 1;
+                tupleValues[j] = getRandomString(length, rnd);
+            }
+            // Set values.
+            for (int j = numKeyFields; j < numFields; j++) {
+                tupleValues[j] = getRandomString(5, rnd);
+            }
+            TupleUtils.createTuple(testCtx.tupleBuilder, testCtx.tuple, testCtx.fieldSerdes, tupleValues);
+            try {
+                testCtx.btree.insert(testCtx.tuple, testCtx.opCtx);
+                // Set expected values. Do this only after insertion succeeds because we ignore duplicate keys.
+                CheckTuple<String> checkTuple = new CheckTuple<String>(numFields, numKeyFields);
+                for(Object v : tupleValues) {
+                    checkTuple.add((String)v);
+                }
+                testCtx.checkTuples.add(checkTuple);
+            } catch (BTreeDuplicateKeyException e) {
+                // Ignore duplicate key insertions.
+            }
+        }
+    }
+
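+    // Generates numTuples random integer tuples, adds them to the expected set, and bulk loads
+    // the (sorted) expected set into the BTree.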
+    public static void bulkLoadIntTuples(BTreeTestContext testCtx, int numTuples, Random rnd) throws Exception {
+        int numFields = testCtx.getFieldCount();
+        int numKeyFields = testCtx.getKeyFieldCount();
+        int[] tupleValues = new int[testCtx.getFieldCount()];
+        int maxValue = (int)Math.ceil(Math.pow(numTuples, 1.0/(double)numKeyFields));
+        for (int i = 0; i < numTuples; i++) {
+            // Set keys.
+            for (int j = 0; j < numKeyFields; j++) {
+                tupleValues[j] = rnd.nextInt() % maxValue;
+            }
+            // Set values.
+            for (int j = numKeyFields; j < numFields; j++) {
+                tupleValues[j] = j;
+            }
+            
+            // Set expected values. We also use these as the pre-sorted stream for bulk loading.
+            CheckTuple<Integer> checkTuple = new CheckTuple<Integer>(numFields, numKeyFields);
+            for(int v : tupleValues) {
+                checkTuple.add(v);
+            }            
+            testCtx.checkTuples.add(checkTuple);
+        }
+        
+        bulkLoadCheckTuples(testCtx, numTuples);
+    }
+    
+    public static void bulkLoadStringTuples(BTreeTestContext testCtx, int numTuples, Random rnd) throws Exception {
+        int numFields = testCtx.getFieldCount();
+        int numKeyFields = testCtx.getKeyFieldCount();
+        String[] tupleValues = new String[numFields];
+        for (int i = 0; i < numTuples; i++) {
+            // Set keys.
+            for (int j = 0; j < numKeyFields; j++) {
+                int length = (Math.abs(rnd.nextInt()) % 10) + 1;
+                tupleValues[j] = getRandomString(length, rnd);
+            }
+            // Set values.
+            for (int j = numKeyFields; j < numFields; j++) {
+                tupleValues[j] = getRandomString(5, rnd);
+            }
+            // Set expected values. We also use these as the pre-sorted stream for bulk loading.
+            CheckTuple<String> checkTuple = new CheckTuple<String>(numFields, numKeyFields);
+            for(String v : tupleValues) {
+                checkTuple.add(v);
+            }            
+            testCtx.checkTuples.add(checkTuple);
+        }
+        
+        bulkLoadCheckTuples(testCtx, numTuples);
+    }
+    
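+    // Bulk loads the current contents of the expected set into the BTree; the TreeSet iteration
+    // order provides the sorted input that bulk loading requires.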
+    private static void bulkLoadCheckTuples(BTreeTestContext testCtx, int numTuples) throws HyracksDataException, TreeIndexException {
+        int numFields = testCtx.getFieldCount();
+        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(numFields);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+        // Perform bulk load.
+        IIndexBulkLoadContext bulkLoadCtx = testCtx.btree.beginBulkLoad(0.7f);
+        int c = 1;
+        for (CheckTuple checkTuple : testCtx.checkTuples) {
+            if (c % (numTuples / Math.min(10, numTuples)) == 0) {
+                LOGGER.info("Bulk Loading Tuple " + c + "/" + numTuples);
+            }
+            createTupleFromCheckTuple(checkTuple, tupleBuilder, tuple, testCtx.fieldSerdes);
+            testCtx.btree.bulkLoadAddTuple(tuple, bulkLoadCtx);
+            c++;
+        }
+        testCtx.btree.endBulkLoad(bulkLoadCtx);
+    }
+    
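+    // Deletes up to numTuples randomly chosen tuples from the BTree, keeping the expected set in sync.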
+    public static void deleteTuples(BTreeTestContext testCtx, int numTuples, Random rnd) throws Exception {
+        ArrayTupleBuilder deleteTupleBuilder = new ArrayTupleBuilder(testCtx.btree.getMultiComparator().getKeyFieldCount());
+        ArrayTupleReference deleteTuple = new ArrayTupleReference();
+        int numCheckTuples = testCtx.checkTuples.size();        
+        testCtx.opCtx.reset(IndexOp.DELETE);
+        // Copy CheckTuple references into array, so we can randomly pick from there.
+        CheckTuple[] checkTuples = new CheckTuple[numCheckTuples];
+        int idx = 0;
+        for (CheckTuple checkTuple : testCtx.checkTuples) {
+            checkTuples[idx++] = checkTuple;
+        }
+        for (int i = 0; i < numTuples && numCheckTuples > 0; i++) {
+            if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
+                LOGGER.info("Deleting Tuple " + (i + 1) + "/" + numTuples);
+            }
+            int checkTupleIdx = Math.abs(rnd.nextInt() % numCheckTuples);
+            CheckTuple checkTuple = checkTuples[checkTupleIdx];            
+            createTupleFromCheckTuple(checkTuple, deleteTupleBuilder, deleteTuple, testCtx.fieldSerdes);          
+            testCtx.btree.delete(deleteTuple, testCtx.opCtx);
+            
+            // Remove check tuple from expected results.
+            testCtx.checkTuples.remove(checkTuple);
+            
+            // Swap with last "valid" CheckTuple.
+            CheckTuple tmp = checkTuples[numCheckTuples - 1];
+            checkTuples[numCheckTuples - 1] = checkTuple;
+            checkTuples[checkTupleIdx] = tmp;
+            numCheckTuples--;
+        }
+    }
+    
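+    // Updates the non-key fields of up to numTuples randomly chosen tuples; a no-op when the
+    // tree has no non-key fields.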
+    @SuppressWarnings("unchecked")
+    public static void updateTuples(BTreeTestContext testCtx, int numTuples, Random rnd) throws Exception {
+        int fieldCount = testCtx.btree.getFieldCount();
+        int keyFieldCount = testCtx.btree.getMultiComparator().getKeyFieldCount();
+        // This is a noop because we can only update non-key fields.
+        if (fieldCount == keyFieldCount) {
+            return;
+        }
+        ArrayTupleBuilder updateTupleBuilder = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference updateTuple = new ArrayTupleReference();
+        int numCheckTuples = testCtx.checkTuples.size();
+        testCtx.opCtx.reset(IndexOp.UPDATE);
+        // Copy CheckTuple references into array, so we can randomly pick from there.
+        CheckTuple[] checkTuples = new CheckTuple[numCheckTuples];
+        int idx = 0;
+        for (CheckTuple checkTuple : testCtx.checkTuples) {
+            checkTuples[idx++] = checkTuple;
+        }
+        for (int i = 0; i < numTuples && numCheckTuples > 0; i++) {
+            if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
+                LOGGER.info("Updating Tuple " + (i + 1) + "/" + numTuples);
+            }
+            int checkTupleIdx = Math.abs(rnd.nextInt() % numCheckTuples);
+            CheckTuple checkTuple = checkTuples[checkTupleIdx];
+            // Update check tuple's non-key fields.
+            for (int j = keyFieldCount; j < fieldCount; j++) {
+                Comparable newValue = getRandomUpdateValue(testCtx.fieldSerdes[j], rnd);
+                checkTuple.set(j, newValue);
+            }
+            
+            createTupleFromCheckTuple(checkTuple, updateTupleBuilder, updateTuple, testCtx.fieldSerdes);            
+            testCtx.btree.update(updateTuple, testCtx.opCtx);
+            
+            // Swap with last "valid" CheckTuple.
+            CheckTuple tmp = checkTuples[numCheckTuples - 1];
+            checkTuples[numCheckTuples - 1] = checkTuple;
+            checkTuples[checkTupleIdx] = tmp;
+            numCheckTuples--;
+        }
+    }
+    
+    private static Comparable getRandomUpdateValue(ISerializerDeserializer serde, Random rnd) {
+        if (serde instanceof IntegerSerializerDeserializer) {
+            return Integer.valueOf(rnd.nextInt());
+        } else if (serde instanceof UTF8StringSerializerDeserializer) {
+            return getRandomString(10, rnd);
+        }
+        return null;
+    }
+    
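+    // Builds a random string of at most 'length' characters, drawn from the hex representation
+    // of a random double.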
+    public static String getRandomString(int length, Random rnd) {
+        String s = Long.toHexString(Double.doubleToLongBits(rnd.nextDouble()));
+        StringBuilder strBuilder = new StringBuilder();
+        for (int i = 0; i < s.length() && i < length; i++) {
+            strBuilder.append(s.charAt(Math.abs(rnd.nextInt()) % s.length()));
+        }
+        return strBuilder.toString();
+    }
+}
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/CheckTuple.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/CheckTuple.java
new file mode 100644
index 0000000..f945ab9
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/CheckTuple.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.util;
+
+@SuppressWarnings({"rawtypes", "unchecked"})
+public class CheckTuple<T extends Comparable<T>> implements Comparable<T> {
+    private final int numKeys;    
+    private final Comparable[] tuple;
+    private int pos;
+
+    public CheckTuple(int numFields, int numKeys) {
+        this.numKeys = numKeys;
+        this.tuple = new Comparable[numFields];
+        pos = 0;
+    }
+
+    public void add(T e) {
+        tuple[pos++] = e;
+    }
+
+    @Override
+    public int compareTo(T o) {
+        CheckTuple<T> other = (CheckTuple<T>)o;
+        for (int i = 0; i < numKeys; i++) {            
+            int cmp = tuple[i].compareTo(other.get(i));
+            if (cmp != 0) {
+                return cmp;
+            }
+        }
+        return 0;
+    }
+
+    public T get(int idx) {
+        return (T)tuple[idx];
+    }
+    
+    public void set(int idx, T e) {
+        tuple[idx] = e;
+    }
+    
+    public int size() {
+        return tuple.length;
+    }
+    
+    public int getNumKeys() {
+        return numKeys;
+    }
+    
+    @Override
+    public String toString() {
+        StringBuilder strBuilder = new StringBuilder();
+        for (int i = 0; i < tuple.length; i++) {
+            strBuilder.append(tuple[i].toString());
+            if (i != tuple.length-1) {
+                strBuilder.append(" ");
+            }
+        }
+        return strBuilder.toString();
+    }
+}
\ No newline at end of file
diff --git a/hyracks-tests/hyracks-storage-am-invertedindex-test/.classpath b/hyracks-tests/hyracks-storage-am-invertedindex-test/.classpath
deleted file mode 100644
index f2cc5f7..0000000
--- a/hyracks-tests/hyracks-storage-am-invertedindex-test/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/test-classes" path="src/test/java"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-tests/hyracks-storage-am-invertedindex-test/.project b/hyracks-tests/hyracks-storage-am-invertedindex-test/.project
deleted file mode 100644
index f60b2f9..0000000
--- a/hyracks-tests/hyracks-storage-am-invertedindex-test/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>hyracks-storage-am-invertedindex-test</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/AbstractInvIndexSearchTest.java b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/AbstractInvIndexSearchTest.java
index ea97511..fa1833a 100644
--- a/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/AbstractInvIndexSearchTest.java
+++ b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/AbstractInvIndexSearchTest.java
@@ -71,7 +71,7 @@
     // declare btree keys
     protected int btreeKeyFieldCount = 1;
     protected IBinaryComparator[] btreeBinCmps = new IBinaryComparator[btreeKeyFieldCount];
-    protected MultiComparator btreeCmp = new MultiComparator(typeTraits, btreeBinCmps);
+    protected MultiComparator btreeCmp = new MultiComparator(btreeBinCmps);
 
     // btree frame factories
     protected TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
@@ -97,7 +97,7 @@
 
     protected int invListKeys = 1;
     protected IBinaryComparator[] invListBinCmps = new IBinaryComparator[invListKeys];
-    protected MultiComparator invListCmp = new MultiComparator(invListTypeTraits, invListBinCmps);
+    protected MultiComparator invListCmp = new MultiComparator(invListBinCmps);
 
     protected InvertedIndex invIndex;
 
@@ -168,8 +168,8 @@
 
         freePageManager = new LinkedListFreePageManager(bufferCache, btreeFileId, 0, metaFrameFactory);
 
-        btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, btreeCmp);
-        btree.create(btreeFileId, leafFrame, metaFrame);
+        btree = new BTree(bufferCache, fieldCount, btreeCmp, freePageManager, interiorFrameFactory, leafFrameFactory);
+        btree.create(btreeFileId);
         btree.open(btreeFileId);
 
         // --- INVERTED INDEX ---
@@ -181,7 +181,7 @@
         invListTypeTraits[0] = new TypeTrait(4);
         invListBinCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
 
-        invIndex = new InvertedIndex(bufferCache, btree, invListCmp);
+        invIndex = new InvertedIndex(bufferCache, btree, invListTypeTraits, invListCmp);
         invIndex.open(invListsFileId);
 
         rnd.setSeed(50);
diff --git a/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/BulkLoadTest.java b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/BulkLoadTest.java
index bdd8c4a..86b3107 100644
--- a/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/BulkLoadTest.java
+++ b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/BulkLoadTest.java
@@ -123,7 +123,7 @@
         IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
         cmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
 
-        MultiComparator btreeCmp = new MultiComparator(typeTraits, cmps);
+        MultiComparator btreeCmp = new MultiComparator(cmps);
 
         TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
         ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
@@ -136,8 +136,8 @@
 
         IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, btreeFileId, 0, metaFrameFactory);
 
-        BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, btreeCmp);
-        btree.create(btreeFileId, leafFrame, metaFrame);
+        BTree btree = new BTree(bufferCache, fieldCount, btreeCmp, freePageManager, interiorFrameFactory, leafFrameFactory);
+        btree.create(btreeFileId);
         btree.open(btreeFileId);
 
         int invListFields = 1;
@@ -148,9 +148,9 @@
         IBinaryComparator[] invListBinCmps = new IBinaryComparator[invListKeys];
         invListBinCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
 
-        MultiComparator invListCmp = new MultiComparator(invListTypeTraits, invListBinCmps);
+        MultiComparator invListCmp = new MultiComparator(invListBinCmps);
 
-        InvertedIndex invIndex = new InvertedIndex(bufferCache, btree, invListCmp);
+        InvertedIndex invIndex = new InvertedIndex(bufferCache, btree, invListTypeTraits, invListCmp);
         invIndex.open(invListsFileId);
 
         Random rnd = new Random();
@@ -241,7 +241,7 @@
         IFrameTupleAccessor tokenAccessor = new FrameTupleAccessor(stageletCtx.getFrameSize(), tokenRecDesc);
         tokenAccessor.reset(frame);
 
-        BTreeOpContext btreeOpCtx = invIndex.getBTree().createOpContext(IndexOp.SEARCH, leafFrame, interiorFrame, null);
+        BTreeOpContext btreeOpCtx = invIndex.getBTree().createOpContext(IndexOp.SEARCH);
 
         // verify created inverted lists one-by-one
         for (int i = 0; i < tokens.size(); i++) {
diff --git a/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchPerfTest.java b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchPerfTest.java
index 4ef8855..e6dba19 100644
--- a/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchPerfTest.java
+++ b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchPerfTest.java
@@ -27,6 +27,7 @@
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
 import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
 import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifier;
 import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListBuilder;
 import edu.uci.ics.hyracks.storage.am.invertedindex.impls.FixedSizeElementInvertedListBuilder;
@@ -65,7 +66,7 @@
 		loadData();
 	}
 
-	public void loadData() throws HyracksDataException {
+	public void loadData() throws HyracksDataException, TreeIndexException {
 		tokens.add("compilers");
 		tokens.add("computer");
 		tokens.add("databases");
diff --git a/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchTest.java b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchTest.java
index c87ce9b..91fb924 100644
--- a/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchTest.java
+++ b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchTest.java
@@ -14,6 +14,7 @@
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
 import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
 import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifier;
 import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListBuilder;
 import edu.uci.ics.hyracks.storage.am.invertedindex.impls.FixedSizeElementInvertedListBuilder;
@@ -112,7 +113,7 @@
 		}
 	}
 
-	public void loadData() throws IOException {
+	public void loadData() throws IOException, TreeIndexException {
 		List<TokenIdPair> pairs = new ArrayList<TokenIdPair>();
 		// generate pairs for subsequent sorting and bulk-loading
 		int id = 0;
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/.classpath b/hyracks-tests/hyracks-storage-am-rtree-test/.classpath
deleted file mode 100644
index f2cc5f7..0000000
--- a/hyracks-tests/hyracks-storage-am-rtree-test/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/test-classes" path="src/test/java"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/.project b/hyracks-tests/hyracks-storage-am-rtree-test/.project
deleted file mode 100644
index ea7e36b..0000000
--- a/hyracks-tests/hyracks-storage-am-rtree-test/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>hyracks-storage-am-rtree-test</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTest.java b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTest.java
index 907d5dc..ce3bd3c 100644
--- a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTest.java
+++ b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTest.java
@@ -39,8 +39,9 @@
 import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
 import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
@@ -50,23 +51,21 @@
 import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.common.utility.TreeIndexStats;
-import edu.uci.ics.hyracks.storage.am.common.utility.TreeIndexStatsGatherer;
+import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexStats;
+import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexStatsGatherer;
 import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeFrame;
 import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.DoublePrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.IntegerPrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
 import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeOpContext;
 import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
 import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
 import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
 import edu.uci.ics.hyracks.test.support.TestUtils;
 
 public class RTreeTest extends AbstractRTreeTest {
-
 	private static final int PAGE_SIZE = 256;
 	private static final int NUM_PAGES = 10;
 	private static final int MAX_OPEN_FILES = 10;
@@ -110,24 +109,18 @@
 		typeTraits[5] = new TypeTrait(4);
 		typeTraits[6] = new TypeTrait(8);
 
-		// declare value providers
-		IPrimitiveValueProvider[] valueProviders = new IPrimitiveValueProvider[keyFieldCount];
-		valueProviders[0] = DoublePrimitiveValueProviderFactory.INSTANCE
-				.createPrimitiveValueProvider();
-		valueProviders[1] = valueProviders[0];
-		valueProviders[2] = valueProviders[0];
-		valueProviders[3] = valueProviders[0];
+		// create value providers
+		IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.comparatorsToPrimitiveValueProviderFactories(cmps); 
 
-		MultiComparator cmp = new MultiComparator(typeTraits, cmps,
-				valueProviders);
+		MultiComparator cmp = new MultiComparator(cmps);
 
 		RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(
 				typeTraits);
 
 		ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(
-				tupleWriterFactory, keyFieldCount);
+				tupleWriterFactory, valueProviderFactories);
 		ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(
-				tupleWriterFactory, keyFieldCount);
+				tupleWriterFactory, valueProviderFactories);
 		ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
 		ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
 
@@ -137,14 +130,14 @@
 		IFreePageManager freePageManager = new LinkedListFreePageManager(
 				bufferCache, fileId, 0, metaFrameFactory);
 
-		RTree rtree = new RTree(bufferCache, freePageManager,
-				interiorFrameFactory, leafFrameFactory, cmp);
-		rtree.create(fileId, leafFrame, metaFrame);
+		RTree rtree = new RTree(bufferCache, fieldCount, cmp, freePageManager,
+				interiorFrameFactory, leafFrameFactory);
+		rtree.create(fileId);
 		rtree.open(fileId);
 
 		ByteBuffer hyracksFrame = ctx.allocateFrame();
 		FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-		ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+		ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
 		DataOutput dos = tb.getDataOutput();
 
 		@SuppressWarnings("rawtypes")
@@ -162,8 +155,7 @@
 		accessor.reset(hyracksFrame);
 		FrameTupleReference tuple = new FrameTupleReference();
 
-		RTreeOpContext insertOpCtx = rtree.createOpContext(IndexOp.INSERT,
-				leafFrame, interiorFrame, metaFrame);
+		RTreeOpContext insertOpCtx = rtree.createOpContext(IndexOp.INSERT);
 
 		Random rnd = new Random();
 		rnd.setSeed(50);
@@ -231,15 +223,13 @@
 		print("DISK-ORDER SCAN:\n");
 		TreeDiskOrderScanCursor diskOrderCursor = new TreeDiskOrderScanCursor(
 				leafFrame);
-		RTreeOpContext diskOrderScanOpCtx = rtree.createOpContext(
-				IndexOp.DISKORDERSCAN, leafFrame, null, null);
-		rtree.diskOrderScan(diskOrderCursor, leafFrame, metaFrame,
-				diskOrderScanOpCtx);
+		RTreeOpContext diskOrderScanOpCtx = rtree.createOpContext(IndexOp.DISKORDERSCAN);
+		rtree.diskOrderScan(diskOrderCursor, diskOrderScanOpCtx);
 		try {
 			while (diskOrderCursor.hasNext()) {
 				diskOrderCursor.next();
 				ITupleReference frameTuple = diskOrderCursor.getTuple();
-				String rec = cmp.printTuple(frameTuple, recDescSers);
+				String rec = TupleUtils.printTuple(frameTuple, recDescSers);
 				print(rec + "\n");
 			}
 		} catch (Exception e) {
@@ -298,24 +288,18 @@
 		typeTraits[5] = new TypeTrait(4);
 		typeTraits[6] = new TypeTrait(8);
 
-		// declare value providers
-		IPrimitiveValueProvider[] valueProviders = new IPrimitiveValueProvider[keyFieldCount];
-		valueProviders[0] = DoublePrimitiveValueProviderFactory.INSTANCE
-				.createPrimitiveValueProvider();
-		valueProviders[1] = valueProviders[0];
-		valueProviders[2] = valueProviders[0];
-		valueProviders[3] = valueProviders[0];
+		// create value providers
+		IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.comparatorsToPrimitiveValueProviderFactories(cmps); 
 
-		MultiComparator cmp = new MultiComparator(typeTraits, cmps,
-				valueProviders);
+		MultiComparator cmp = new MultiComparator(cmps);
 
 		RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(
 				typeTraits);
 
 		ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(
-				tupleWriterFactory, keyFieldCount);
+				tupleWriterFactory, valueProviderFactories);
 		ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(
-				tupleWriterFactory, keyFieldCount);
+				tupleWriterFactory, valueProviderFactories);
 		ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
 		ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
 
@@ -325,14 +309,14 @@
 		IFreePageManager freePageManager = new LinkedListFreePageManager(
 				bufferCache, fileId, 0, metaFrameFactory);
 
-		RTree rtree = new RTree(bufferCache, freePageManager,
-				interiorFrameFactory, leafFrameFactory, cmp);
-		rtree.create(fileId, leafFrame, metaFrame);
+		RTree rtree = new RTree(bufferCache, fieldCount, cmp, freePageManager,
+				interiorFrameFactory, leafFrameFactory);
+		rtree.create(fileId);
 		rtree.open(fileId);
 
 		ByteBuffer hyracksFrame = ctx.allocateFrame();
 		FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-		ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+		ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
 		DataOutput dos = tb.getDataOutput();
 
 		@SuppressWarnings("rawtypes")
@@ -350,8 +334,7 @@
 		accessor.reset(hyracksFrame);
 		FrameTupleReference tuple = new FrameTupleReference();
 
-		RTreeOpContext insertOpCtx = rtree.createOpContext(IndexOp.INSERT,
-				leafFrame, interiorFrame, metaFrame);
+		RTreeOpContext insertOpCtx = rtree.createOpContext(IndexOp.INSERT);
 
 		Random rnd = new Random();
 		rnd.setSeed(50);
@@ -413,8 +396,7 @@
 		String rtreeStats = rtree.printStats();
 		print(rtreeStats);
 
-		RTreeOpContext deleteOpCtx = rtree.createOpContext(IndexOp.DELETE,
-				leafFrame, interiorFrame, metaFrame);
+		RTreeOpContext deleteOpCtx = rtree.createOpContext(IndexOp.DELETE);
 		rnd.setSeed(50);
 		for (int i = 0; i < 5000; i++) {
 
@@ -522,26 +504,18 @@
 		typeTraits[7] = new TypeTrait(4);
 		typeTraits[8] = new TypeTrait(8);
 
-		// declare value providers
-		IPrimitiveValueProvider[] valueProviders = new IPrimitiveValueProvider[keyFieldCount];
-		valueProviders[0] = DoublePrimitiveValueProviderFactory.INSTANCE
-				.createPrimitiveValueProvider();
-		valueProviders[1] = valueProviders[0];
-		valueProviders[2] = valueProviders[0];
-		valueProviders[3] = valueProviders[0];
-		valueProviders[4] = valueProviders[0];
-		valueProviders[5] = valueProviders[0];
+		// create value providers
+		IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.comparatorsToPrimitiveValueProviderFactories(cmps); 
 
-		MultiComparator cmp = new MultiComparator(typeTraits, cmps,
-				valueProviders);
+		MultiComparator cmp = new MultiComparator(cmps);
 
 		RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(
 				typeTraits);
 
 		ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(
-				tupleWriterFactory, keyFieldCount);
+				tupleWriterFactory, valueProviderFactories);
 		ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(
-				tupleWriterFactory, keyFieldCount);
+				tupleWriterFactory, valueProviderFactories);
 		ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
 		ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
 
@@ -551,14 +525,14 @@
 		IFreePageManager freePageManager = new LinkedListFreePageManager(
 				bufferCache, fileId, 0, metaFrameFactory);
 
-		RTree rtree = new RTree(bufferCache, freePageManager,
-				interiorFrameFactory, leafFrameFactory, cmp);
-		rtree.create(fileId, leafFrame, metaFrame);
+		RTree rtree = new RTree(bufferCache, fieldCount, cmp, freePageManager,
+				interiorFrameFactory, leafFrameFactory);
+		rtree.create(fileId);
 		rtree.open(fileId);
 
 		ByteBuffer hyracksFrame = ctx.allocateFrame();
 		FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-		ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+		ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
 		DataOutput dos = tb.getDataOutput();
 
 		@SuppressWarnings("rawtypes")
@@ -578,8 +552,7 @@
 		accessor.reset(hyracksFrame);
 		FrameTupleReference tuple = new FrameTupleReference();
 
-		RTreeOpContext insertOpCtx = rtree.createOpContext(IndexOp.INSERT,
-				leafFrame, interiorFrame, metaFrame);
+		RTreeOpContext insertOpCtx = rtree.createOpContext(IndexOp.INSERT);
 
 		Random rnd = new Random();
 		rnd.setSeed(50);
@@ -654,15 +627,13 @@
 		print("DISK-ORDER SCAN:\n");
 		TreeDiskOrderScanCursor diskOrderCursor = new TreeDiskOrderScanCursor(
 				leafFrame);
-		RTreeOpContext diskOrderScanOpCtx = rtree.createOpContext(
-				IndexOp.DISKORDERSCAN, leafFrame, null, null);
-		rtree.diskOrderScan(diskOrderCursor, leafFrame, metaFrame,
-				diskOrderScanOpCtx);
+		RTreeOpContext diskOrderScanOpCtx = rtree.createOpContext(IndexOp.DISKORDERSCAN);
+		rtree.diskOrderScan(diskOrderCursor, diskOrderScanOpCtx);
 		try {
 			while (diskOrderCursor.hasNext()) {
 				diskOrderCursor.next();
 				ITupleReference frameTuple = diskOrderCursor.getTuple();
-				String rec = cmp.printTuple(frameTuple, recDescSers);
+				String rec = TupleUtils.printTuple(frameTuple, recDescSers);
 				print(rec + "\n");
 			}
 		} catch (Exception e) {
@@ -721,24 +692,18 @@
 		typeTraits[5] = new TypeTrait(4);
 		typeTraits[6] = new TypeTrait(8);
 
-		// declare value providers
-		IPrimitiveValueProvider[] valueProviders = new IPrimitiveValueProvider[keyFieldCount];
-		valueProviders[0] = IntegerPrimitiveValueProviderFactory.INSTANCE
-				.createPrimitiveValueProvider();
-		valueProviders[1] = valueProviders[0];
-		valueProviders[2] = valueProviders[0];
-		valueProviders[3] = valueProviders[0];
+		// create value providers
+		IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.comparatorsToPrimitiveValueProviderFactories(cmps); 
 
-		MultiComparator cmp = new MultiComparator(typeTraits, cmps,
-				valueProviders);
+		MultiComparator cmp = new MultiComparator(cmps);
 
 		RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(
 				typeTraits);
 
 		ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(
-				tupleWriterFactory, keyFieldCount);
+				tupleWriterFactory, valueProviderFactories);
 		ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(
-				tupleWriterFactory, keyFieldCount);
+				tupleWriterFactory, valueProviderFactories);
 		ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
 		ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
 
@@ -748,14 +713,14 @@
 		IFreePageManager freePageManager = new LinkedListFreePageManager(
 				bufferCache, fileId, 0, metaFrameFactory);
 
-		RTree rtree = new RTree(bufferCache, freePageManager,
-				interiorFrameFactory, leafFrameFactory, cmp);
-		rtree.create(fileId, leafFrame, metaFrame);
+		RTree rtree = new RTree(bufferCache, fieldCount, cmp, freePageManager,
+				interiorFrameFactory, leafFrameFactory);
+		rtree.create(fileId);
 		rtree.open(fileId);
 
 		ByteBuffer hyracksFrame = ctx.allocateFrame();
 		FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-		ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+		ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
 		DataOutput dos = tb.getDataOutput();
 
 		@SuppressWarnings("rawtypes")
@@ -773,8 +738,7 @@
 		accessor.reset(hyracksFrame);
 		FrameTupleReference tuple = new FrameTupleReference();
 
-		RTreeOpContext insertOpCtx = rtree.createOpContext(IndexOp.INSERT,
-				leafFrame, interiorFrame, metaFrame);
+		RTreeOpContext insertOpCtx = rtree.createOpContext(IndexOp.INSERT);
 
 		Random rnd = new Random();
 		rnd.setSeed(50);
@@ -842,15 +806,13 @@
 		print("DISK-ORDER SCAN:\n");
 		TreeDiskOrderScanCursor diskOrderCursor = new TreeDiskOrderScanCursor(
 				leafFrame);
-		RTreeOpContext diskOrderScanOpCtx = rtree.createOpContext(
-				IndexOp.DISKORDERSCAN, leafFrame, null, null);
-		rtree.diskOrderScan(diskOrderCursor, leafFrame, metaFrame,
-				diskOrderScanOpCtx);
+		RTreeOpContext diskOrderScanOpCtx = rtree.createOpContext(IndexOp.DISKORDERSCAN);
+		rtree.diskOrderScan(diskOrderCursor, diskOrderScanOpCtx);
 		try {
 			while (diskOrderCursor.hasNext()) {
 				diskOrderCursor.next();
 				ITupleReference frameTuple = diskOrderCursor.getTuple();
-				String rec = cmp.printTuple(frameTuple, recDescSers);
+				String rec = TupleUtils.printTuple(frameTuple, recDescSers);
 				print(rec + "\n");
 			}
 		} catch (Exception e) {
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/SearchCursorTest.java b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/SearchCursorTest.java
index b04afe5..c87f0da 100644
--- a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/SearchCursorTest.java
+++ b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/SearchCursorTest.java
@@ -43,7 +43,7 @@
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
 import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
@@ -57,12 +57,12 @@
 import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
 import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.DoublePrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
 import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeOpContext;
 import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeSearchCursor;
 import edu.uci.ics.hyracks.storage.am.rtree.impls.SearchPredicate;
 import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
 import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
 import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
@@ -110,24 +110,18 @@
 		typeTraits[3] = new TypeTrait(8);
 		typeTraits[4] = new TypeTrait(4);
 
-		// declare value providers
-		IPrimitiveValueProvider[] valueProviders = new IPrimitiveValueProvider[keyFieldCount];
-		valueProviders[0] = DoublePrimitiveValueProviderFactory.INSTANCE
-				.createPrimitiveValueProvider();
-		valueProviders[1] = valueProviders[0];
-		valueProviders[2] = valueProviders[0];
-		valueProviders[3] = valueProviders[0];
+		// create value providers
+		IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.comparatorsToPrimitiveValueProviderFactories(cmps); 
 
-		MultiComparator cmp = new MultiComparator(typeTraits, cmps,
-				valueProviders);
+		MultiComparator cmp = new MultiComparator(cmps);
 
 		RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(
 				typeTraits);
 
 		ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(
-				tupleWriterFactory, keyFieldCount);
+				tupleWriterFactory, valueProviderFactories);
 		ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(
-				tupleWriterFactory, keyFieldCount);
+				tupleWriterFactory, valueProviderFactories);
 		ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
 		ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
 
@@ -138,14 +132,14 @@
 		IFreePageManager freePageManager = new LinkedListFreePageManager(
 				bufferCache, fileId, 0, metaFrameFactory);
 
-		RTree rtree = new RTree(bufferCache, freePageManager,
-				interiorFrameFactory, leafFrameFactory, cmp);
-		rtree.create(fileId, leafFrame, metaFrame);
+		RTree rtree = new RTree(bufferCache, fieldCount, cmp, freePageManager,
+				interiorFrameFactory, leafFrameFactory);
+		rtree.create(fileId);
 		rtree.open(fileId);
 
 		ByteBuffer hyracksFrame = ctx.allocateFrame();
 		FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-		ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+		ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
 		DataOutput dos = tb.getDataOutput();
 
 		@SuppressWarnings("rawtypes")
@@ -161,8 +155,7 @@
 		accessor.reset(hyracksFrame);
 		FrameTupleReference tuple = new FrameTupleReference();
 
-		RTreeOpContext insertOpCtx = rtree.createOpContext(IndexOp.INSERT,
-				leafFrame, interiorFrame, metaFrame);
+		RTreeOpContext insertOpCtx = rtree.createOpContext(IndexOp.INSERT);
 
 		Random rnd = new Random();
 		rnd.setSeed(50);
@@ -249,8 +242,7 @@
 					interiorFrame, leafFrame);
 			SearchPredicate searchPredicate = new SearchPredicate(tuple, cmp);
 
-			RTreeOpContext searchOpCtx = rtree.createOpContext(IndexOp.SEARCH,
-					leafFrame, interiorFrame, metaFrame);
+			RTreeOpContext searchOpCtx = rtree.createOpContext(IndexOp.SEARCH);
 			rtree.search(searchCursor, searchPredicate, searchOpCtx);
 
 			ArrayList<Integer> results = new ArrayList<Integer>();