Merged hyracks_dev_next -r 1287 into trunk

git-svn-id: https://hyracks.googlecode.com/svn/trunk@1288 123451ca-8445-de46-9d55-352943316053
diff --git a/hyracks/hyracks-tests/.project b/hyracks/hyracks-tests/.project
deleted file mode 100644
index 198463d..0000000
--- a/hyracks/hyracks-tests/.project
+++ /dev/null
@@ -1,17 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>hyracks-tests</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks/hyracks-tests/.settings/org.maven.ide.eclipse.prefs b/hyracks/hyracks-tests/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index 99b89a6..0000000
--- a/hyracks/hyracks-tests/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Thu Jan 06 11:27:16 PST 2011
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/.classpath b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/.classpath
deleted file mode 100644
index f2cc5f7..0000000
--- a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/test-classes" path="src/test/java"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/.project b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/.project
deleted file mode 100644
index bc6bc56..0000000
--- a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>hyracks-storage-am-btree-test</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/.settings/org.eclipse.jdt.core.prefs b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644
index 7cf8ad6..0000000
--- a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,264 +0,0 @@
-#Fri May 20 19:34:07 PDT 2011
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
-org.eclipse.jdt.core.compiler.compliance=1.6
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.6
-org.eclipse.jdt.core.formatter.align_type_members_on_columns=false
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression=16
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant=48
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call=16
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation=16
-org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression=16
-org.eclipse.jdt.core.formatter.alignment_for_assignment=0
-org.eclipse.jdt.core.formatter.alignment_for_binary_expression=16
-org.eclipse.jdt.core.formatter.alignment_for_compact_if=16
-org.eclipse.jdt.core.formatter.alignment_for_conditional_expression=80
-org.eclipse.jdt.core.formatter.alignment_for_enum_constants=48
-org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer=16
-org.eclipse.jdt.core.formatter.alignment_for_multiple_fields=16
-org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation=16
-org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration=16
-org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration=16
-org.eclipse.jdt.core.formatter.blank_lines_after_imports=1
-org.eclipse.jdt.core.formatter.blank_lines_after_package=1
-org.eclipse.jdt.core.formatter.blank_lines_before_field=0
-org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration=0
-org.eclipse.jdt.core.formatter.blank_lines_before_imports=1
-org.eclipse.jdt.core.formatter.blank_lines_before_member_type=1
-org.eclipse.jdt.core.formatter.blank_lines_before_method=1
-org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk=1
-org.eclipse.jdt.core.formatter.blank_lines_before_package=0
-org.eclipse.jdt.core.formatter.blank_lines_between_import_groups=1
-org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations=1
-org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_array_initializer=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_block=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_block_in_case=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_enum_constant=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_method_declaration=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_switch=end_of_line
-org.eclipse.jdt.core.formatter.brace_position_for_type_declaration=end_of_line
-org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment=false
-org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment=false
-org.eclipse.jdt.core.formatter.comment.format_block_comments=true
-org.eclipse.jdt.core.formatter.comment.format_header=false
-org.eclipse.jdt.core.formatter.comment.format_html=true
-org.eclipse.jdt.core.formatter.comment.format_javadoc_comments=true
-org.eclipse.jdt.core.formatter.comment.format_line_comments=true
-org.eclipse.jdt.core.formatter.comment.format_source_code=true
-org.eclipse.jdt.core.formatter.comment.indent_parameter_description=true
-org.eclipse.jdt.core.formatter.comment.indent_root_tags=true
-org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags=insert
-org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter=insert
-org.eclipse.jdt.core.formatter.comment.line_length=80
-org.eclipse.jdt.core.formatter.compact_else_if=true
-org.eclipse.jdt.core.formatter.continuation_indentation=2
-org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer=2
-org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line=false
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header=true
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header=true
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header=true
-org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header=true
-org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases=true
-org.eclipse.jdt.core.formatter.indent_empty_lines=false
-org.eclipse.jdt.core.formatter.indent_statements_compare_to_block=true
-org.eclipse.jdt.core.formatter.indent_statements_compare_to_body=true
-org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases=true
-org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch=true
-org.eclipse.jdt.core.formatter.indentation.size=4
-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable=insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_member=insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body=insert
-org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter=insert
-org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_binary_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block=insert
-org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments=insert
-org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters=insert
-org.eclipse.jdt.core.formatter.insert_space_after_ellipsis=insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard=do not insert
-org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_after_unary_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter=insert
-org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_binary_operator=insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert=insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_ellipsis=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized=insert
-org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while=insert
-org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return=insert
-org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw=insert
-org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional=insert
-org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_semicolon=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for=do not insert
-org.eclipse.jdt.core.formatter.insert_space_before_unary_operator=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration=do not insert
-org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation=do not insert
-org.eclipse.jdt.core.formatter.join_lines_in_comments=true
-org.eclipse.jdt.core.formatter.join_wrapped_lines=true
-org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line=false
-org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line=false
-org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line=false
-org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line=false
-org.eclipse.jdt.core.formatter.lineSplit=120
-org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column=false
-org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column=false
-org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body=0
-org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve=1
-org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line=true
-org.eclipse.jdt.core.formatter.tabulation.char=space
-org.eclipse.jdt.core.formatter.tabulation.size=4
-org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations=false
-org.eclipse.jdt.core.formatter.wrap_before_binary_operator=true
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/.settings/org.maven.ide.eclipse.prefs b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index 99b89a6..0000000
--- a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Thu Jan 06 11:27:16 PST 2011
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml
index 09b3282..68ab309 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml
@@ -2,12 +2,12 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>edu.uci.ics.hyracks</groupId>
   <artifactId>hyracks-storage-am-btree-test</artifactId>
-  <version>0.1.9-SNAPSHOT</version>
+  <version>0.2.0-SNAPSHOT</version>
 
   <parent>
     <groupId>edu.uci.ics.hyracks</groupId>
     <artifactId>hyracks-tests</artifactId>
-    <version>0.1.9-SNAPSHOT</version>
+    <version>0.2.0-SNAPSHOT</version>
   </parent>
 
   <build>
@@ -34,20 +34,20 @@
   	<dependency>
   		<groupId>edu.uci.ics.hyracks</groupId>
   		<artifactId>hyracks-control-nc</artifactId>
-  		<version>0.1.9-SNAPSHOT</version>
+  		<version>0.2.0-SNAPSHOT</version>
   		<scope>compile</scope>
   	</dependency>
   	<dependency>
   		<groupId>edu.uci.ics.hyracks</groupId>
   		<artifactId>hyracks-storage-am-btree</artifactId>
-  		<version>0.1.9-SNAPSHOT</version>
+  		<version>0.2.0-SNAPSHOT</version>
   		<type>jar</type>
   		<scope>compile</scope>
   	</dependency>
   	<dependency>
   		<groupId>edu.uci.ics.hyracks</groupId>
   		<artifactId>hyracks-test-support</artifactId>
-  		<version>0.1.9-SNAPSHOT</version>
+  		<version>0.2.0-SNAPSHOT</version>
   		<type>jar</type>
   		<scope>test</scope>
   	</dependency>
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/AbstractBTreeTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/AbstractBTreeTest.java
deleted file mode 100644
index 56ae6e9..0000000
--- a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/AbstractBTreeTest.java
+++ /dev/null
@@ -1,28 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import java.io.File;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.logging.Logger;
-
-import org.junit.AfterClass;
-
-public abstract class AbstractBTreeTest {
-
-    protected static final Logger LOGGER = Logger.getLogger(AbstractBTreeTest.class.getName());
-
-    protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
-    protected final static String tmpDir = System.getProperty("java.io.tmpdir");
-    protected final static String sep = System.getProperty("file.separator");
-    protected final static String fileName = tmpDir + sep + simpleDateFormat.format(new Date());
-
-    protected void print(String str) {
-        System.out.print(str);
-    }
-
-    @AfterClass
-    public static void cleanup() throws Exception {
-        File f = new File(fileName);
-        f.deleteOnExit();
-    }
-}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeExamplesTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeExamplesTest.java
new file mode 100644
index 0000000..5bb86b9
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeExamplesTest.java
@@ -0,0 +1,624 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.util.Random;
+import java.util.logging.Level;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.btree.util.AbstractBTreeTest;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+
+@SuppressWarnings("rawtypes")
+public class BTreeExamplesTest extends AbstractBTreeTest {
+
+    /**
+     * Fixed-Length Key,Value Example.
+     * 
+     * Create a BTree with one fixed-length key field and one fixed-length value
+     * field. Fill BTree with random values using insertions (not bulk load).
+     * Perform scans and range search.
+     */
+    @Test
+    public void fixedLengthKeyValueExample() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Fixed-Length Key,Value Example.");
+        }
+
+        // Declare fields.
+        int fieldCount = 2;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+        // Declare field serdes.
+        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE };
+
+        // Declare keys.
+        int keyFieldCount = 1;
+        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+        cmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
+
+        BTree btree = BTreeUtils
+                .createBTree(bufferCache, btreeFileId, typeTraits, cmps, BTreeLeafFrameType.REGULAR_NSM);
+        btree.create(btreeFileId);
+        btree.open(btreeFileId);
+
+        long start = System.currentTimeMillis();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Inserting into tree...");
+        }
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+        ITreeIndexAccessor indexAccessor = btree.createAccessor();
+        int numInserts = 10000;
+        for (int i = 0; i < numInserts; i++) {
+            int f0 = rnd.nextInt() % numInserts;
+            int f1 = 5;
+            TupleUtils.createIntegerTuple(tb, tuple, f0, f1);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if (i % 1000 == 0) {
+                    LOGGER.info("Inserting " + i + " : " + f0 + " " + f1);
+                }
+            }
+            try {
+                indexAccessor.insert(tuple);
+            } catch (TreeIndexException e) {
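+                // Random keys may collide; failed (duplicate-key) inserts are simply ignored.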
+            }
+        }
+        long end = System.currentTimeMillis();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
+        }
+
+        orderedScan(btree, indexAccessor, fieldSerdes);
+        diskOrderScan(btree, indexAccessor, fieldSerdes);
+
+        // Build low key.
+        ArrayTupleBuilder lowKeyTb = new ArrayTupleBuilder(keyFieldCount);
+        ArrayTupleReference lowKey = new ArrayTupleReference();
+        TupleUtils.createIntegerTuple(lowKeyTb, lowKey, -1000);
+
+        // Build high key.
+        ArrayTupleBuilder highKeyTb = new ArrayTupleBuilder(keyFieldCount);
+        ArrayTupleReference highKey = new ArrayTupleReference();
+        TupleUtils.createIntegerTuple(highKeyTb, highKey, 1000);
+
+        rangeSearch(btree, indexAccessor, fieldSerdes, lowKey, highKey);
+
+        btree.close();
+    }
+
+    /**
+     * Composite Key Example (Non-Unique B-Tree).
+     * 
+     * Create a BTree with two fixed-length key fields and one fixed-length
+     * value field. Fill BTree with random values using insertions (not bulk
+     * load). Perform scans and range search.
+     */
+    @Test
+    public void twoFixedLengthKeysOneFixedLengthValueExample() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Composite Key Test");
+        }
+
+        // Declare fields.
+        int fieldCount = 3;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[2] = IntegerPointable.TYPE_TRAITS;
+        // Declare field serdes.
+        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+
+        // declare keys
+        int keyFieldCount = 2;
+        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+        cmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
+        cmps[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
+
+        BTree btree = BTreeUtils
+                .createBTree(bufferCache, btreeFileId, typeTraits, cmps, BTreeLeafFrameType.REGULAR_NSM);
+        btree.create(btreeFileId);
+        btree.open(btreeFileId);
+
+        long start = System.currentTimeMillis();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Inserting into tree...");
+        }
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+        ITreeIndexAccessor indexAccessor = btree.createAccessor();
+        int numInserts = 10000;
+        for (int i = 0; i < numInserts; i++) {
+            int f0 = rnd.nextInt() % 2000;
+            int f1 = rnd.nextInt() % 1000;
+            int f2 = 5;
+            TupleUtils.createIntegerTuple(tb, tuple, f0, f1, f2);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if (i % 1000 == 0) {
+                    LOGGER.info("Inserting " + i + " : " + f0 + " " + f1 + " " + f2);
+                }
+            }
+            try {
+                indexAccessor.insert(tuple);
+            } catch (TreeIndexException e) {
+            }
+        }
+        long end = System.currentTimeMillis();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
+        }
+
+        orderedScan(btree, indexAccessor, fieldSerdes);
+        diskOrderScan(btree, indexAccessor, fieldSerdes);
+
+        // Build low key.
+        ArrayTupleBuilder lowKeyTb = new ArrayTupleBuilder(1);
+        ArrayTupleReference lowKey = new ArrayTupleReference();
+        TupleUtils.createIntegerTuple(lowKeyTb, lowKey, -3);
+
+        // Build high key.
+        ArrayTupleBuilder highKeyTb = new ArrayTupleBuilder(1);
+        ArrayTupleReference highKey = new ArrayTupleReference();
+        TupleUtils.createIntegerTuple(highKeyTb, highKey, 3);
+
+        // Prefix-Range search in [-3, 3]
+        rangeSearch(btree, indexAccessor, fieldSerdes, lowKey, highKey);
+
+        btree.close();
+    }
+
+    /**
+     * Variable-Length Example. Create a BTree with one variable-length key
+     * field and one variable-length value field. Fill BTree with random values
+     * using insertions (not bulk load). Perform ordered scans and range search.
+     */
+    @Test
+    public void varLenKeyValueExample() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Variable-Length Key,Value Example");
+        }
+
+        // Declare fields.
+        int fieldCount = 2;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
+        typeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
+        // Declare field serdes.
+        ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE };
+
+        // Declare keys.
+        int keyFieldCount = 1;
+        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+        cmps[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY).createBinaryComparator();
+
+        BTree btree = BTreeUtils
+                .createBTree(bufferCache, btreeFileId, typeTraits, cmps, BTreeLeafFrameType.REGULAR_NSM);
+        btree.create(btreeFileId);
+        btree.open(btreeFileId);
+
+        long start = System.currentTimeMillis();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Inserting into tree...");
+        }
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+        ITreeIndexAccessor indexAccessor = btree.createAccessor();
+        // Max string length to be generated.
+        int maxLength = 10;
+        int numInserts = 10000;
+        for (int i = 0; i < numInserts; i++) {
+            String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+            String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+            TupleUtils.createTuple(tb, tuple, fieldSerdes, f0, f1);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if (i % 1000 == 0) {
+                    LOGGER.info("Inserting " + f0 + " " + f1);
+                }
+            }
+            try {
+                indexAccessor.insert(tuple);
+            } catch (TreeIndexException e) {
+            }
+        }
+        long end = System.currentTimeMillis();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
+        }
+
+        orderedScan(btree, indexAccessor, fieldSerdes);
+        diskOrderScan(btree, indexAccessor, fieldSerdes);
+
+        // Build low key.
+        ArrayTupleBuilder lowKeyTb = new ArrayTupleBuilder(1);
+        ArrayTupleReference lowKey = new ArrayTupleReference();
+        TupleUtils.createTuple(lowKeyTb, lowKey, fieldSerdes, "cbf");
+
+        // Build high key.
+        ArrayTupleBuilder highKeyTb = new ArrayTupleBuilder(1);
+        ArrayTupleReference highKey = new ArrayTupleReference();
+        TupleUtils.createTuple(highKeyTb, highKey, fieldSerdes, "cc7");
+
+        rangeSearch(btree, indexAccessor, fieldSerdes, lowKey, highKey);
+
+        btree.close();
+    }
+
+    /**
+     * Deletion Example.
+     * 
+     * Create a BTree with one variable-length key field and one variable-length
+     * value field. Fill B-tree with random values using insertions, then delete
+     * entries one-by-one. Repeat procedure a few times on same BTree.
+     */
+    @Test
+    public void deleteExample() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Deletion Example");
+        }
+
+        // Declare fields.
+        int fieldCount = 2;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
+        typeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
+        // Declare field serdes.
+        ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE };
+
+        // Declare keys.
+        int keyFieldCount = 1;
+        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+        cmps[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY).createBinaryComparator();
+
+        BTree btree = BTreeUtils
+                .createBTree(bufferCache, btreeFileId, typeTraits, cmps, BTreeLeafFrameType.REGULAR_NSM);
+        btree.create(btreeFileId);
+        btree.open(btreeFileId);
+
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+        ITreeIndexAccessor indexAccessor = btree.createAccessor();
+        // Number of insert/delete rounds to run on the same BTree.
+        int runs = 3;
+        for (int run = 0; run < runs; run++) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Deletion example run: " + (run + 1) + "/" + runs);
+                LOGGER.info("Inserting into tree...");
+            }
+            int maxLength = 10;
+            int ins = 10000;
+            String[] f0s = new String[ins];
+            String[] f1s = new String[ins];
+            int insDone = 0;
+            int[] insDoneCmp = new int[ins];
+            for (int i = 0; i < ins; i++) {
+                String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+                String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+                TupleUtils.createTuple(tb, tuple, fieldSerdes, f0, f1);
+                f0s[i] = f0;
+                f1s[i] = f1;
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    if (i % 1000 == 0) {
+                        LOGGER.info("Inserting " + i);
+                    }
+                }
+                try {
+                    indexAccessor.insert(tuple);
+                    insDone++;
+                } catch (TreeIndexException e) {
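+                    // A failed (duplicate-key) insert does not increment insDone.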
+                }
+                insDoneCmp[i] = insDone;
+            }
+
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Deleting from tree...");
+            }
+            int delDone = 0;
+            for (int i = 0; i < ins; i++) {
+                TupleUtils.createTuple(tb, tuple, fieldSerdes, f0s[i], f1s[i]);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    if (i % 1000 == 0) {
+                        LOGGER.info("Deleting " + i);
+                    }
+                }
+                try {
+                    indexAccessor.delete(tuple);
+                    delDone++;
+                } catch (TreeIndexException e) {
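+                    // A key that was a duplicate during insertion exists only once,
+                    // so its repeated delete fails; ignore it.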
+                }
+                if (insDoneCmp[i] != delDone) {
+                    if (LOGGER.isLoggable(Level.INFO)) {
+                        LOGGER.info("INCONSISTENT STATE, ERROR IN DELETION EXAMPLE.");
+                        LOGGER.info("INSDONECMP: " + insDoneCmp[i] + " " + delDone);
+                    }
+                    break;
+                }
+            }
+            if (insDone != delDone) {
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("ERROR! INSDONE: " + insDone + " DELDONE: " + delDone);
+                }
+                break;
+            }
+        }
+        btree.close();
+    }
+
+    /**
+     * Update example.
+     * 
+     * Create a BTree with one variable-length key field and one variable-length
+     * value field. Fill B-tree with random values using insertions, then update
+     * entries one-by-one. Repeat procedure a few times on same BTree.
+     */
+    @Test
+    public void updateExample() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Update example");
+        }
+
+        // Declare fields.
+        int fieldCount = 2;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
+        typeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
+        // Declare field serdes.
+        ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE };
+
+        // Declare keys.
+        int keyFieldCount = 1;
+        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+        cmps[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY).createBinaryComparator();
+
+        BTree btree = BTreeUtils
+                .createBTree(bufferCache, btreeFileId, typeTraits, cmps, BTreeLeafFrameType.REGULAR_NSM);
+        btree.create(btreeFileId);
+        btree.open(btreeFileId);
+
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Inserting into tree...");
+        }
+        ITreeIndexAccessor indexAccessor = btree.createAccessor();
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+        int maxLength = 10;
+        int ins = 10000;
+        String[] keys = new String[ins];
+        for (int i = 0; i < ins; i++) {
+            String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+            String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+            TupleUtils.createTuple(tb, tuple, fieldSerdes, f0, f1);
+            keys[i] = f0;
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if (i % 1000 == 0) {
+                    LOGGER.info("Inserting " + i);
+                }
+            }
+            try {
+                indexAccessor.insert(tuple);
+            } catch (TreeIndexException e) {
+            }
+        }
+        // Print before doing any updates.
+        orderedScan(btree, indexAccessor, fieldSerdes);
+
+        int runs = 3;
+        for (int run = 0; run < runs; run++) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Update test run: " + (run + 1) + "/" + runs);
+                LOGGER.info("Updating BTree");
+            }
+            for (int i = 0; i < ins; i++) {
+                // Generate a new random value for f1.
+                String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+                TupleUtils.createTuple(tb, tuple, fieldSerdes, keys[i], f1);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    if (i % 1000 == 0) {
+                        LOGGER.info("UPDATING " + i);
+                    }
+                }
+                try {
+                    indexAccessor.update(tuple);
+                } catch (TreeIndexException e) {
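+                    // Failed updates are ignored in this example.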
+                }
+            }
+            // Do another scan after a round of updates.
+            orderedScan(btree, indexAccessor, fieldSerdes);
+        }
+        btree.close();
+    }
+
+    /**
+     * Bulk load example.
+     * 
+     * Load a tree with 100,000 tuples. The BTree has a composite key to "simulate"
+     * non-unique index creation.
+     * 
+     */
+    @Test
+    public void bulkLoadExample() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Bulk load example");
+        }
+        // Declare fields.
+        int fieldCount = 3;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[2] = IntegerPointable.TYPE_TRAITS;
+        // Declare field serdes.
+        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+
+        // declare keys
+        int keyFieldCount = 2;
+        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+        cmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
+        cmps[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
+
+        BTree btree = BTreeUtils
+                .createBTree(bufferCache, btreeFileId, typeTraits, cmps, BTreeLeafFrameType.REGULAR_NSM);
+        btree.create(btreeFileId);
+        btree.open(btreeFileId);
+
+        // Load sorted records.
+        int ins = 100000;
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Bulk loading " + ins + " tuples");
+        }
+        long start = System.currentTimeMillis();
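+        // The 0.7f argument is the bulk-load page fill factor; the loop below adds
+        // tuples in ascending (already sorted) key order, as bulk load expects.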
+        IIndexBulkLoadContext bulkLoadCtx = btree.beginBulkLoad(0.7f);
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+        for (int i = 0; i < ins; i++) {
+            TupleUtils.createIntegerTuple(tb, tuple, i, i, 5);
+            btree.bulkLoadAddTuple(tuple, bulkLoadCtx);
+        }
+        btree.endBulkLoad(bulkLoadCtx);
+        long end = System.currentTimeMillis();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info(ins + " tuples loaded in " + (end - start) + "ms");
+        }
+
+        ITreeIndexAccessor indexAccessor = btree.createAccessor();
+
+        // Build low key.
+        ArrayTupleBuilder lowKeyTb = new ArrayTupleBuilder(1);
+        ArrayTupleReference lowKey = new ArrayTupleReference();
+        TupleUtils.createIntegerTuple(lowKeyTb, lowKey, 44444);
+
+        // Build high key.
+        ArrayTupleBuilder highKeyTb = new ArrayTupleBuilder(1);
+        ArrayTupleReference highKey = new ArrayTupleReference();
+        TupleUtils.createIntegerTuple(highKeyTb, highKey, 44500);
+
+        // Prefix-Range search in [44444, 44500]
+        rangeSearch(btree, indexAccessor, fieldSerdes, lowKey, highKey);
+
+        btree.close();
+    }
+
+    private void orderedScan(BTree btree, ITreeIndexAccessor indexAccessor, ISerializerDeserializer[] fieldSerdes)
+            throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Ordered Scan:");
+        }
+        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) btree.getLeafFrameFactory().createFrame();
+        ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(leafFrame, false);
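+        // Null low/high keys and comparators make the predicate unbounded, i.e. a full ordered scan.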
+        RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+        indexAccessor.search(scanCursor, nullPred);
+        try {
+            while (scanCursor.hasNext()) {
+                scanCursor.next();
+                ITupleReference frameTuple = scanCursor.getTuple();
+                String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info(rec);
+                }
+            }
+        } finally {
+            scanCursor.close();
+        }
+    }
+
+    private void diskOrderScan(BTree btree, ITreeIndexAccessor indexAccessor, ISerializerDeserializer[] fieldSerdes)
+            throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Disk-Order Scan:");
+        }
+        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) btree.getLeafFrameFactory().createFrame();
+        TreeDiskOrderScanCursor diskOrderCursor = new TreeDiskOrderScanCursor(leafFrame);
+        indexAccessor.diskOrderScan(diskOrderCursor);
+        try {
+            while (diskOrderCursor.hasNext()) {
+                diskOrderCursor.next();
+                ITupleReference frameTuple = diskOrderCursor.getTuple();
+                String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info(rec);
+                }
+            }
+        } finally {
+            diskOrderCursor.close();
+        }
+    }
+
+    private void rangeSearch(BTree btree, ITreeIndexAccessor indexAccessor, ISerializerDeserializer[] fieldSerdes,
+            ITupleReference lowKey, ITupleReference highKey) throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            String lowKeyString = TupleUtils.printTuple(lowKey, fieldSerdes);
+            String highKeyString = TupleUtils.printTuple(highKey, fieldSerdes);
+            LOGGER.info("Range-Search in: [" + lowKeyString + ", " + highKeyString + "]");
+        }
+        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) btree.getLeafFrameFactory().createFrame();
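+        // Derive search comparators from the key tuples, so keys with fewer fields
+        // than the index behave as prefix keys in the range search.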
+        MultiComparator lowKeySearchCmp = BTreeUtils.getSearchMultiComparator(btree.getMultiComparator(), lowKey);
+        MultiComparator highKeySearchCmp = BTreeUtils.getSearchMultiComparator(btree.getMultiComparator(), highKey);
+        ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor(leafFrame, false);
+        RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, lowKeySearchCmp,
+                highKeySearchCmp);
+        indexAccessor.search(rangeCursor, rangePred);
+        try {
+            while (rangeCursor.hasNext()) {
+                rangeCursor.next();
+                ITupleReference frameTuple = rangeCursor.getTuple();
+                String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info(rec);
+                }
+            }
+        } finally {
+            rangeCursor.close();
+        }
+    }
+
+    public static String randomString(int length, Random random) {
+        String s = Long.toHexString(Double.doubleToLongBits(random.nextDouble()));
+        StringBuilder strBuilder = new StringBuilder();
+        for (int i = 0; i < s.length() && i < length; i++) {
+            strBuilder.append(s.charAt(Math.abs(random.nextInt()) % s.length()));
+        }
+        return strBuilder.toString();
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTest.java
deleted file mode 100644
index 7d93761..0000000
--- a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTest.java
+++ /dev/null
@@ -1,1179 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- * 
- *     http://www.apache.org/licenses/LICENSE-2.0
- * 
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import java.io.DataOutput;
-import java.io.File;
-import java.nio.ByteBuffer;
-import java.util.Random;
-
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.TypeTrait;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.comparators.UTF8StringBinaryComparatorFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestUtils;
-
-public class BTreeTest extends AbstractBTreeTest {
-
-    private static final int PAGE_SIZE = 256;
-    private static final int NUM_PAGES = 10;
-    private static final int MAX_OPEN_FILES = 10;
-    private static final int HYRACKS_FRAME_SIZE = 128;
-    private IHyracksStageletContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
-
-    // FIXED-LENGTH KEY TEST
-    // create a B-tree with one fixed-length "key" field and one fixed-length
-    // "value" field
-    // fill B-tree with random values using insertions (not bulk load)
-    // perform ordered scan and range search
-    @Test
-    public void test01() throws Exception {
-
-        LOGGER.info("FIXED-LENGTH KEY TEST");
-
-        TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
-        IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
-        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
-        FileReference file = new FileReference(new File(fileName));
-        bufferCache.createFile(file);
-        int fileId = fmp.lookupFileId(file);
-        bufferCache.openFile(fileId);
-
-        // declare fields
-        int fieldCount = 2;
-        ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
-        typeTraits[0] = new TypeTrait(4);
-        typeTraits[1] = new TypeTrait(4);
-
-        // declare keys
-        int keyFieldCount = 1;
-        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
-        cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-
-        MultiComparator cmp = new MultiComparator(typeTraits, cmps);
-
-        TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
-        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
-        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
-        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
-
-        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
-        IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
-        ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
-
-        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
-
-        BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
-        btree.create(fileId, leafFrame, metaFrame);
-        btree.open(fileId);
-
-        Random rnd = new Random();
-        rnd.setSeed(50);
-
-        long start = System.currentTimeMillis();
-
-        LOGGER.info("INSERTING INTO TREE");
-
-        ByteBuffer frame = ctx.allocateFrame();
-        FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
-        DataOutput dos = tb.getDataOutput();
-
-        ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE };
-        RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
-        IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
-        accessor.reset(frame);
-        FrameTupleReference tuple = new FrameTupleReference();
-
-        BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
-
-        // 10000
-        for (int i = 0; i < 10000; i++) {
-
-            int f0 = rnd.nextInt() % 10000;
-            int f1 = 5;
-
-            tb.reset();
-            IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
-            tb.addFieldEndOffset();
-            IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
-            tb.addFieldEndOffset();
-
-            appender.reset(frame, true);
-            appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
-            tuple.reset(accessor, 0);
-
-            if (i % 1000 == 0) {
-                long end = System.currentTimeMillis();
-                LOGGER.info("INSERTING " + i + " : " + f0 + " " + f1 + " " + (end - start));
-            }
-
-            try {
-                btree.insert(tuple, insertOpCtx);
-            } catch (TreeIndexException e) {
-            } catch (Exception e) {
-                e.printStackTrace();
-            }
-        }
-        // btree.printTree(leafFrame, interiorFrame);
-
-        int maxPage = btree.getFreePageManager().getMaxPage(metaFrame);
-        LOGGER.info("MAXPAGE: " + maxPage);
-        LOGGER.info(btree.printStats());
-
-        long end = System.currentTimeMillis();
-        long duration = end - start;
-        LOGGER.info("DURATION: " + duration);
-
-        // ordered scan
-
-        LOGGER.info("ORDERED SCAN:");
-        ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(leafFrame);
-        RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
-        BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH, leafFrame, interiorFrame, null);
-        btree.search(scanCursor, nullPred, searchOpCtx);
-        try {
-            while (scanCursor.hasNext()) {
-                scanCursor.next();
-                ITupleReference frameTuple = scanCursor.getTuple();
-                String rec = cmp.printTuple(frameTuple, recDescSers);
-                LOGGER.info(rec);
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-        } finally {
-            scanCursor.close();
-        }
-
-        // disk-order scan
-        LOGGER.info("DISK-ORDER SCAN:");
-        TreeDiskOrderScanCursor diskOrderCursor = new TreeDiskOrderScanCursor(leafFrame);
-        BTreeOpContext diskOrderScanOpCtx = btree.createOpContext(IndexOp.DISKORDERSCAN, leafFrame, null, null);
-        btree.diskOrderScan(diskOrderCursor, leafFrame, metaFrame, diskOrderScanOpCtx);
-        try {
-            while (diskOrderCursor.hasNext()) {
-                diskOrderCursor.next();
-                ITupleReference frameTuple = diskOrderCursor.getTuple();
-                String rec = cmp.printTuple(frameTuple, recDescSers);
-                LOGGER.info(rec);
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-        } finally {
-            diskOrderCursor.close();
-        }
-
-        // range search in [-1000, 1000]
-        LOGGER.info("RANGE SEARCH:");
-
-        ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor(leafFrame);
-
-        // build low and high keys
-        ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
-        DataOutput kdos = ktb.getDataOutput();
-
-        ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE };
-        RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
-        IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx.getFrameSize(), keyDesc);
-        keyAccessor.reset(frame);
-
-        appender.reset(frame, true);
-
-        // build and append low key
-        ktb.reset();
-        IntegerSerializerDeserializer.INSTANCE.serialize(-1000, kdos);
-        ktb.addFieldEndOffset();
-        appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
-
-        // build and append high key
-        ktb.reset();
-        IntegerSerializerDeserializer.INSTANCE.serialize(1000, kdos);
-        ktb.addFieldEndOffset();
-        appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
-
-        // create tuplereferences for search keys
-        FrameTupleReference lowKey = new FrameTupleReference();
-        lowKey.reset(keyAccessor, 0);
-
-        FrameTupleReference highKey = new FrameTupleReference();
-        highKey.reset(keyAccessor, 1);
-
-        IBinaryComparator[] searchCmps = new IBinaryComparator[1];
-        searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-        MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
-
-        RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
-        btree.search(rangeCursor, rangePred, searchOpCtx);
-
-        try {
-            while (rangeCursor.hasNext()) {
-                rangeCursor.next();
-                ITupleReference frameTuple = rangeCursor.getTuple();
-                String rec = cmp.printTuple(frameTuple, recDescSers);
-                LOGGER.info(rec);
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-        } finally {
-            rangeCursor.close();
-        }
-
-        btree.close();
-        bufferCache.closeFile(fileId);
-        bufferCache.close();
-    }
-
-    // COMPOSITE KEY TEST (NON-UNIQUE B-TREE)
-    // create a B-tree with one two fixed-length "key" fields and one
-    // fixed-length "value" field
-    // fill B-tree with random values using insertions (not bulk load)
-    // perform ordered scan and range search
-    @Test
-    public void test02() throws Exception {
-
-        LOGGER.info("COMPOSITE KEY TEST");
-
-        TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
-        IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
-        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
-        FileReference file = new FileReference(new File(fileName));
-        bufferCache.createFile(file);
-        int fileId = fmp.lookupFileId(file);
-        bufferCache.openFile(fileId);
-
-        // declare fields
-        int fieldCount = 3;
-        ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
-        typeTraits[0] = new TypeTrait(4);
-        typeTraits[1] = new TypeTrait(4);
-        typeTraits[2] = new TypeTrait(4);
-
-        // declare keys
-        int keyFieldCount = 2;
-        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
-        cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-        cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-
-        MultiComparator cmp = new MultiComparator(typeTraits, cmps);
-
-        TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
-        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
-        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
-        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
-
-        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
-        IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
-        ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
-
-        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
-
-        BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
-        btree.create(fileId, leafFrame, metaFrame);
-        btree.open(fileId);
-
-        Random rnd = new Random();
-        rnd.setSeed(50);
-
-        long start = System.currentTimeMillis();
-
-        LOGGER.info("INSERTING INTO TREE");
-
-        ByteBuffer frame = ctx.allocateFrame();
-        FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
-        DataOutput dos = tb.getDataOutput();
-
-        ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
-        RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
-        IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
-        accessor.reset(frame);
-        FrameTupleReference tuple = new FrameTupleReference();
-
-        BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
-
-        for (int i = 0; i < 10000; i++) {
-            int f0 = rnd.nextInt() % 2000;
-            int f1 = rnd.nextInt() % 1000;
-            int f2 = 5;
-
-            tb.reset();
-            IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
-            tb.addFieldEndOffset();
-            IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
-            tb.addFieldEndOffset();
-            IntegerSerializerDeserializer.INSTANCE.serialize(f2, dos);
-            tb.addFieldEndOffset();
-
-            appender.reset(frame, true);
-            appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
-            tuple.reset(accessor, 0);
-
-            if (i % 1000 == 0) {
-                LOGGER.info("INSERTING " + i + " : " + f0 + " " + f1);
-            }
-
-            try {
-                btree.insert(tuple, insertOpCtx);
-            } catch (Exception e) {
-            }
-        }
-        // btree.printTree(leafFrame, interiorFrame);
-
-        long end = System.currentTimeMillis();
-        long duration = end - start;
-        LOGGER.info("DURATION: " + duration);
-
-        // try a simple index scan
-        LOGGER.info("ORDERED SCAN:");
-        ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(leafFrame);
-        RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
-        BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH, leafFrame, interiorFrame, null);
-        btree.search(scanCursor, nullPred, searchOpCtx);
-
-        try {
-            while (scanCursor.hasNext()) {
-                scanCursor.next();
-                ITupleReference frameTuple = scanCursor.getTuple();
-                String rec = cmp.printTuple(frameTuple, recDescSers);
-                LOGGER.info(rec);
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-        } finally {
-            scanCursor.close();
-        }
-
-        // range search in [(-3),(3)]
-        LOGGER.info("RANGE SEARCH:");
-        ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor(leafFrame);
-
-        // build low and high keys
-        ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
-        DataOutput kdos = ktb.getDataOutput();
-
-        ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE };
-        RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
-        IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx.getFrameSize(), keyDesc);
-        keyAccessor.reset(frame);
-
-        appender.reset(frame, true);
-
-        // build and append low key
-        ktb.reset();
-        IntegerSerializerDeserializer.INSTANCE.serialize(-3, kdos);
-        ktb.addFieldEndOffset();
-        appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
-
-        // build and append high key
-        ktb.reset();
-        IntegerSerializerDeserializer.INSTANCE.serialize(3, kdos);
-        ktb.addFieldEndOffset();
-        appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
-
-        // create tuplereferences for search keys
-        FrameTupleReference lowKey = new FrameTupleReference();
-        lowKey.reset(keyAccessor, 0);
-
-        FrameTupleReference highKey = new FrameTupleReference();
-        highKey.reset(keyAccessor, 1);
-
-        IBinaryComparator[] searchCmps = new IBinaryComparator[1];
-        searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-        MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps); // use
-        // only
-        // a
-        // single
-        // comparator
-        // for
-        // searching
-
-        RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
-        btree.search(rangeCursor, rangePred, searchOpCtx);
-
-        try {
-            while (rangeCursor.hasNext()) {
-                rangeCursor.next();
-                ITupleReference frameTuple = rangeCursor.getTuple();
-                String rec = cmp.printTuple(frameTuple, recDescSers);
-                print(rec + "\n");
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-        } finally {
-            rangeCursor.close();
-        }
-
-        btree.close();
-        bufferCache.closeFile(fileId);
-        bufferCache.close();
-    }
-
-    // VARIABLE-LENGTH TEST
-    // create a B-tree with one variable-length "key" field and one
-    // variable-length "value" field
-    // fill B-tree with random values using insertions (not bulk load)
-    // perform ordered scan and range search
-    @Test
-    public void test03() throws Exception {
-
-        LOGGER.info("VARIABLE-LENGTH KEY TEST");
-
-        TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
-        IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
-        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
-        FileReference file = new FileReference(new File(fileName));
-        bufferCache.createFile(file);
-        int fileId = fmp.lookupFileId(file);
-        bufferCache.openFile(fileId);
-
-        // declare fields
-        int fieldCount = 2;
-        ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
-        typeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
-        typeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
-
-        // declare keys
-        int keyFieldCount = 1;
-        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
-        cmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-
-        MultiComparator cmp = new MultiComparator(typeTraits, cmps);
-
-        TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
-        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
-        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
-        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
-
-        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
-        IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
-        ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
-
-        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
-
-        BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
-        btree.create(fileId, leafFrame, metaFrame);
-        btree.open(fileId);
-
-        Random rnd = new Random();
-        rnd.setSeed(50);
-
-        ByteBuffer frame = ctx.allocateFrame();
-        FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
-        DataOutput dos = tb.getDataOutput();
-
-        ISerializerDeserializer[] recDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE };
-        RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
-        IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
-        accessor.reset(frame);
-        FrameTupleReference tuple = new FrameTupleReference();
-
-        BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
-        int maxLength = 10; // max string length to be generated
-        for (int i = 0; i < 10000; i++) {
-
-            String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
-            String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
-
-            tb.reset();
-            UTF8StringSerializerDeserializer.INSTANCE.serialize(f0, dos);
-            tb.addFieldEndOffset();
-            UTF8StringSerializerDeserializer.INSTANCE.serialize(f1, dos);
-            tb.addFieldEndOffset();
-
-            appender.reset(frame, true);
-            appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
-            tuple.reset(accessor, 0);
-
-            if (i % 1000 == 0) {
-                LOGGER.info("INSERTING " + i);
-            }
-
-            try {
-                btree.insert(tuple, insertOpCtx);
-            } catch (Exception e) {
-            }
-        }
-        // btree.printTree();
-
-        LOGGER.info("DONE INSERTING");
-
-        // ordered scan
-        LOGGER.info("ORDERED SCAN:");
-        ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(leafFrame);
-        RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
-        BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH, leafFrame, interiorFrame, null);
-        btree.search(scanCursor, nullPred, searchOpCtx);
-
-        try {
-            while (scanCursor.hasNext()) {
-                scanCursor.next();
-                ITupleReference frameTuple = scanCursor.getTuple();
-                String rec = cmp.printTuple(frameTuple, recDescSers);
-                LOGGER.info(rec);
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-        } finally {
-            scanCursor.close();
-        }
-
-        // range search in ["cbf", cc7"]
-        LOGGER.info("RANGE SEARCH:");
-
-        ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor(leafFrame);
-
-        // build low and high keys
-        ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
-        DataOutput kdos = ktb.getDataOutput();
-
-        ISerializerDeserializer[] keyDescSers = { UTF8StringSerializerDeserializer.INSTANCE };
-        RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
-        IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx.getFrameSize(), keyDesc);
-        keyAccessor.reset(frame);
-
-        appender.reset(frame, true);
-
-        // build and append low key
-        ktb.reset();
-        UTF8StringSerializerDeserializer.INSTANCE.serialize("cbf", kdos);
-        ktb.addFieldEndOffset();
-        appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
-
-        // build and append high key
-        ktb.reset();
-        UTF8StringSerializerDeserializer.INSTANCE.serialize("cc7", kdos);
-        ktb.addFieldEndOffset();
-        appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
-
-        // create tuplereferences for search keys
-        FrameTupleReference lowKey = new FrameTupleReference();
-        lowKey.reset(keyAccessor, 0);
-
-        FrameTupleReference highKey = new FrameTupleReference();
-        highKey.reset(keyAccessor, 1);
-
-        IBinaryComparator[] searchCmps = new IBinaryComparator[1];
-        searchCmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-        MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
-
-        RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
-        btree.search(rangeCursor, rangePred, searchOpCtx);
-
-        try {
-            while (rangeCursor.hasNext()) {
-                rangeCursor.next();
-                ITupleReference frameTuple = rangeCursor.getTuple();
-                String rec = cmp.printTuple(frameTuple, recDescSers);
-                LOGGER.info(rec);
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-        } finally {
-            rangeCursor.close();
-        }
-
-        btree.close();
-        bufferCache.closeFile(fileId);
-        bufferCache.close();
-    }
-
-    // DELETION TEST
-    // create a B-tree with one variable-length "key" field and one
-    // variable-length "value" field
-    // fill B-tree with random values using insertions, then delete entries
-    // one-by-one
-    // repeat procedure a few times on same B-tree
-    @Test
-    public void test04() throws Exception {
-
-        LOGGER.info("DELETION TEST");
-
-        TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
-        IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
-        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
-        FileReference file = new FileReference(new File(fileName));
-        bufferCache.createFile(file);
-        int fileId = fmp.lookupFileId(file);
-        bufferCache.openFile(fileId);
-
-        // declare fields
-        int fieldCount = 2;
-        ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
-        typeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
-        typeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
-
-        // declare keys
-        int keyFieldCount = 1;
-        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
-        cmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-
-        MultiComparator cmp = new MultiComparator(typeTraits, cmps);
-
-        TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
-        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
-        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
-        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
-
-        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
-        IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
-        ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
-
-        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
-
-        BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
-        btree.create(fileId, leafFrame, metaFrame);
-        btree.open(fileId);
-
-        Random rnd = new Random();
-        rnd.setSeed(50);
-
-        ByteBuffer frame = ctx.allocateFrame();
-        FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
-        DataOutput dos = tb.getDataOutput();
-
-        ISerializerDeserializer[] recDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
-                UTF8StringSerializerDeserializer.INSTANCE };
-        RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
-        IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
-        accessor.reset(frame);
-        FrameTupleReference tuple = new FrameTupleReference();
-
-        BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
-        BTreeOpContext deleteOpCtx = btree.createOpContext(IndexOp.DELETE, leafFrame, interiorFrame, metaFrame);
-
-        int runs = 3;
-        for (int run = 0; run < runs; run++) {
-
-            LOGGER.info("DELETION TEST RUN: " + (run + 1) + "/" + runs);
-
-            LOGGER.info("INSERTING INTO BTREE");
-            int maxLength = 10;
-            int ins = 10000;
-            String[] f0s = new String[ins];
-            String[] f1s = new String[ins];
-            int insDone = 0;
-            int[] insDoneCmp = new int[ins];
-            for (int i = 0; i < ins; i++) {
-                String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
-                String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
-
-                f0s[i] = f0;
-                f1s[i] = f1;
-
-                tb.reset();
-                UTF8StringSerializerDeserializer.INSTANCE.serialize(f0, dos);
-                tb.addFieldEndOffset();
-                UTF8StringSerializerDeserializer.INSTANCE.serialize(f1, dos);
-                tb.addFieldEndOffset();
-
-                appender.reset(frame, true);
-                appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
-                tuple.reset(accessor, 0);
-
-                if (i % 1000 == 0) {
-                    LOGGER.info("INSERTING " + i);
-                }
-
-                try {
-                    btree.insert(tuple, insertOpCtx);
-                    insDone++;
-                } catch (TreeIndexException e) {
-                    // e.printStackTrace();
-                } catch (Exception e) {
-                    e.printStackTrace();
-                }
-
-                insDoneCmp[i] = insDone;
-            }
-            // btree.printTree();
-            // btree.printStats();
-
-            LOGGER.info("DELETING FROM BTREE");
-            int delDone = 0;
-            for (int i = 0; i < ins; i++) {
-
-                tb.reset();
-                UTF8StringSerializerDeserializer.INSTANCE.serialize(f0s[i], dos);
-                tb.addFieldEndOffset();
-                UTF8StringSerializerDeserializer.INSTANCE.serialize(f1s[i], dos);
-                tb.addFieldEndOffset();
-
-                appender.reset(frame, true);
-                appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
-                tuple.reset(accessor, 0);
-
-                if (i % 1000 == 0) {
-                    LOGGER.info("DELETING " + i);
-                }
-
-                try {
-                    btree.delete(tuple, deleteOpCtx);
-                    delDone++;
-                } catch (TreeIndexException e) {
-                    // e.printStackTrace();
-                } catch (Exception e) {
-                    e.printStackTrace();
-                }
-
-                if (insDoneCmp[i] != delDone) {
-                    LOGGER.info("INCONSISTENT STATE, ERROR IN DELETION TEST");
-                    LOGGER.info("INSDONECMP: " + insDoneCmp[i] + " " + delDone);
-                    break;
-                }
-            }
-            // btree.printTree(leafFrame, interiorFrame);
-
-            if (insDone != delDone) {
-                LOGGER.info("ERROR! INSDONE: " + insDone + " DELDONE: " + delDone);
-                break;
-            }
-        }
-
-        btree.close();
-        bufferCache.closeFile(fileId);
-        bufferCache.close();
-    }
-
-    // BULK LOAD TEST
-    // insert 100,000 records in bulk
-    // B-tree has a composite key to "simulate" non-unique index creation
-    // do range search
-    @Test
-    public void test05() throws Exception {
-
-        LOGGER.info("BULK LOAD TEST");
-
-        TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
-        IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
-        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
-        FileReference file = new FileReference(new File(fileName));
-        bufferCache.createFile(file);
-        int fileId = fmp.lookupFileId(file);
-        bufferCache.openFile(fileId);
-
-        // declare fields
-        int fieldCount = 3;
-        ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
-        typeTraits[0] = new TypeTrait(4);
-        typeTraits[1] = new TypeTrait(4);
-        typeTraits[2] = new TypeTrait(4);
-
-        // declare keys
-        int keyFieldCount = 2;
-        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
-        cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-        cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-
-        MultiComparator cmp = new MultiComparator(typeTraits, cmps);
-
-        TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
-        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
-        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
-        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
-
-        ITreeIndexFrame leafFrame = leafFrameFactory.createFrame();
-        ITreeIndexFrame interiorFrame = interiorFrameFactory.createFrame();
-        ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
-
-        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
-
-        BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
-        btree.create(fileId, leafFrame, metaFrame);
-        btree.open(fileId);
-
-        Random rnd = new Random();
-        rnd.setSeed(50);
-
-        ByteBuffer frame = ctx.allocateFrame();
-        FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
-        DataOutput dos = tb.getDataOutput();
-
-        ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
-        RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
-        IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
-        accessor.reset(frame);
-        FrameTupleReference tuple = new FrameTupleReference();
-
-        IIndexBulkLoadContext bulkLoadCtx = btree.beginBulkLoad(0.7f, leafFrame, interiorFrame, metaFrame);
-
-        // generate sorted records
-        int ins = 100000;
-        LOGGER.info("BULK LOADING " + ins + " RECORDS");
-        long start = System.currentTimeMillis();
-        for (int i = 0; i < ins; i++) {
-
-            tb.reset();
-            IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
-            tb.addFieldEndOffset();
-            IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
-            tb.addFieldEndOffset();
-            IntegerSerializerDeserializer.INSTANCE.serialize(5, dos);
-            tb.addFieldEndOffset();
-
-            appender.reset(frame, true);
-            appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
-            tuple.reset(accessor, 0);
-
-            btree.bulkLoadAddTuple(bulkLoadCtx, tuple);
-        }
-
-        btree.endBulkLoad(bulkLoadCtx);
-
-        // btree.printTree(leafFrame, interiorFrame);
-
-        long end = System.currentTimeMillis();
-        long duration = end - start;
-        LOGGER.info("DURATION: " + duration);
-
-        // range search
-        LOGGER.info("RANGE SEARCH:");
-        ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor((IBTreeLeafFrame) leafFrame);
-
-        // build low and high keys
-        ArrayTupleBuilder ktb = new ArrayTupleBuilder(1);
-        DataOutput kdos = ktb.getDataOutput();
-
-        ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE };
-        RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
-        IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx.getFrameSize(), keyDesc);
-        keyAccessor.reset(frame);
-
-        appender.reset(frame, true);
-
-        // build and append low key
-        ktb.reset();
-        IntegerSerializerDeserializer.INSTANCE.serialize(44444, kdos);
-        ktb.addFieldEndOffset();
-        appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
-
-        // build and append high key
-        ktb.reset();
-        IntegerSerializerDeserializer.INSTANCE.serialize(44500, kdos);
-        ktb.addFieldEndOffset();
-        appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
-
-        // create tuplereferences for search keys
-        FrameTupleReference lowKey = new FrameTupleReference();
-        lowKey.reset(keyAccessor, 0);
-
-        FrameTupleReference highKey = new FrameTupleReference();
-        highKey.reset(keyAccessor, 1);
-
-        IBinaryComparator[] searchCmps = new IBinaryComparator[1];
-        searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-        MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
-
-        // TODO: check when searching backwards
-        RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
-        BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH, leafFrame, interiorFrame, null);
-        btree.search(rangeCursor, rangePred, searchOpCtx);
-
-        try {
-            while (rangeCursor.hasNext()) {
-                rangeCursor.next();
-                ITupleReference frameTuple = rangeCursor.getTuple();
-                String rec = cmp.printTuple(frameTuple, recDescSers);
-                LOGGER.info(rec);
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-        } finally {
-            rangeCursor.close();
-        }
-
-        btree.close();
-        bufferCache.closeFile(fileId);
-        bufferCache.close();
-    }
-
-    // TIME-INTERVAL INTERSECTION DEMO FOR EVENT PEOPLE
-    // demo for Arjun to show easy support of intersection queries on
-    // time-intervals
-    @Test
-    public void test06() throws Exception {
-
-        LOGGER.info("TIME-INTERVAL INTERSECTION DEMO");
-
-        TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
-        IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
-        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
-        FileReference file = new FileReference(new File(fileName));
-        bufferCache.createFile(file);
-        int fileId = fmp.lookupFileId(file);
-        bufferCache.openFile(fileId);
-
-        // declare fields
-        int fieldCount = 3;
-        ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
-        typeTraits[0] = new TypeTrait(4);
-        typeTraits[1] = new TypeTrait(4);
-        typeTraits[2] = new TypeTrait(4);
-
-        // declare keys
-        int keyFieldCount = 2;
-        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
-        cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-        cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-        MultiComparator cmp = new MultiComparator(typeTraits, cmps);
-
-        TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
-        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
-        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
-        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
-
-        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
-        IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
-        ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
-
-        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
-
-        BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
-        btree.create(fileId, leafFrame, metaFrame);
-        btree.open(fileId);
-
-        Random rnd = new Random();
-        rnd.setSeed(50);
-
-        ByteBuffer frame = ctx.allocateFrame();
-        FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
-        DataOutput dos = tb.getDataOutput();
-
-        ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
-        RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
-        IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
-        accessor.reset(frame);
-        FrameTupleReference tuple = new FrameTupleReference();
-
-        long start = System.currentTimeMillis();
-
-        int intervalCount = 10;
-        int[][] intervals = new int[intervalCount][2];
-
-        intervals[0][0] = 10;
-        intervals[0][1] = 20;
-
-        intervals[1][0] = 11;
-        intervals[1][1] = 20;
-
-        intervals[2][0] = 12;
-        intervals[2][1] = 20;
-
-        intervals[3][0] = 13;
-        intervals[3][1] = 20;
-
-        intervals[4][0] = 14;
-        intervals[4][1] = 20;
-
-        intervals[5][0] = 20;
-        intervals[5][1] = 30;
-
-        intervals[6][0] = 20;
-        intervals[6][1] = 31;
-
-        intervals[7][0] = 20;
-        intervals[7][1] = 32;
-
-        intervals[8][0] = 20;
-        intervals[8][1] = 33;
-
-        intervals[9][0] = 20;
-        intervals[9][1] = 35;
-
-        BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
-
-        // int exceptionCount = 0;
-        for (int i = 0; i < intervalCount; i++) {
-            int f0 = intervals[i][0];
-            int f1 = intervals[i][1];
-            int f2 = rnd.nextInt() % 100;
-
-            tb.reset();
-            IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
-            tb.addFieldEndOffset();
-            IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
-            tb.addFieldEndOffset();
-            IntegerSerializerDeserializer.INSTANCE.serialize(f2, dos);
-            tb.addFieldEndOffset();
-
-            appender.reset(frame, true);
-            appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
-            tuple.reset(accessor, 0);
-
-            LOGGER.info("INSERTING " + i);
-
-            try {
-                btree.insert(tuple, insertOpCtx);
-            } catch (Exception e) {
-            }
-        }
-        // btree.printTree(leafFrame, interiorFrame);
-        // btree.printStats();
-
-        long end = System.currentTimeMillis();
-        long duration = end - start;
-        LOGGER.info("DURATION: " + duration);
-
-        // try a simple index scan
-
-        LOGGER.info("ORDERED SCAN:");
-        ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(leafFrame);
-        RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
-        BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH, leafFrame, interiorFrame, null);
-        btree.search(scanCursor, nullPred, searchOpCtx);
-
-        try {
-            while (scanCursor.hasNext()) {
-                scanCursor.next();
-                ITupleReference frameTuple = scanCursor.getTuple();
-                String rec = cmp.printTuple(frameTuple, recDescSers);
-                print(rec + "\n");
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-        } finally {
-            scanCursor.close();
-        }
-
-        // try a range search
-        LOGGER.info("RANGE SEARCH:");
-        ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor(leafFrame);
-
-        // build low and high keys
-        ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
-        DataOutput kdos = ktb.getDataOutput();
-
-        ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE,
-                IntegerSerializerDeserializer.INSTANCE };
-        RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
-        IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx.getFrameSize(), keyDesc);
-        keyAccessor.reset(frame);
-
-        appender.reset(frame, true);
-
-        // build and append low key
-        ktb.reset();
-        IntegerSerializerDeserializer.INSTANCE.serialize(12, kdos);
-        ktb.addFieldEndOffset();
-        IntegerSerializerDeserializer.INSTANCE.serialize(12, kdos);
-        ktb.addFieldEndOffset();
-        appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
-
-        // build and append high key
-        ktb.reset();
-        IntegerSerializerDeserializer.INSTANCE.serialize(19, kdos);
-        ktb.addFieldEndOffset();
-        IntegerSerializerDeserializer.INSTANCE.serialize(19, kdos);
-        ktb.addFieldEndOffset();
-        appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
-
-        // create tuplereferences for search keys
-        FrameTupleReference lowKey = new FrameTupleReference();
-        lowKey.reset(keyAccessor, 0);
-
-        FrameTupleReference highKey = new FrameTupleReference();
-        highKey.reset(keyAccessor, 1);
-
-        IBinaryComparator[] searchCmps = new IBinaryComparator[2];
-        searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-        searchCmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-        MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
-
-        RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
-        btree.search(rangeCursor, rangePred, searchOpCtx);
-
-        try {
-            while (rangeCursor.hasNext()) {
-                rangeCursor.next();
-                ITupleReference frameTuple = rangeCursor.getTuple();
-                String rec = cmp.printTuple(frameTuple, recDescSers);
-                LOGGER.info(rec);
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-        } finally {
-            rangeCursor.close();
-        }
-
-        btree.close();
-        bufferCache.closeFile(fileId);
-        bufferCache.close();
-    }
-
-    public static String randomString(int length, Random random) {
-        String s = Long.toHexString(Double.doubleToLongBits(random.nextDouble()));
-        StringBuilder strBuilder = new StringBuilder();
-        for (int i = 0; i < s.length() && i < length; i++) {
-            strBuilder.append(s.charAt(Math.abs(random.nextInt()) % s.length()));
-        }
-        return strBuilder.toString();
-    }
-}
\ No newline at end of file
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTestDriver.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTestDriver.java
new file mode 100644
index 0000000..1daa273
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTestDriver.java
@@ -0,0 +1,133 @@
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.util.logging.Level;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.util.AbstractBTreeTest;
+
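+// Shared driver for the B-tree tests: each @Test case below sets up a key/value schema, builds the
+// matching low/high (and, for composite keys, prefix) search keys, and runs the case against both
+// the regular and the field-prefix-compressed NSM leaf frame types.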
+@SuppressWarnings("rawtypes")
+public abstract class BTreeTestDriver extends AbstractBTreeTest {
+
+    protected static final int numTuplesToInsert = 10000;
+    
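+    // Concrete subclasses (e.g. BulkLoadTest) implement the operation under test; getTestOpName() is used only for logging.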
+    protected abstract void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType, ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey) throws Exception;
+    protected abstract String getTestOpName();
+    
+    @Test
+    public void oneIntKeyAndValue() throws Exception {        
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("BTree " + getTestOpName() + " Test With One Int Key And Value.");
+        }
+                
+        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+        // Range search in [-1000, 1000]
+        ITupleReference lowKey = TupleUtils.createIntegerTuple(-1000);
+        ITupleReference highKey = TupleUtils.createIntegerTuple(1000);
+        
+        runTest(fieldSerdes, 1, BTreeLeafFrameType.REGULAR_NSM, lowKey, highKey, null, null);
+        runTest(fieldSerdes, 1, BTreeLeafFrameType.FIELD_PREFIX_COMPRESSED_NSM, lowKey, highKey, null, null);
+    }
+    
+    @Test
+    public void twoIntKeys() throws Exception {    
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("BTree " + getTestOpName() + " Test With Two Int Keys.");
+        }
+        
+        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+        
+        // Range search in [50 0, 50 500]
+        ITupleReference lowKey = TupleUtils.createIntegerTuple(50, 0);
+        ITupleReference highKey = TupleUtils.createIntegerTuple(50, 500);
+        
+        // Prefix range search in [50, 50]
+        ITupleReference prefixLowKey = TupleUtils.createIntegerTuple(50);
+        ITupleReference prefixHighKey = TupleUtils.createIntegerTuple(50);
+        
+        runTest(fieldSerdes, 2, BTreeLeafFrameType.REGULAR_NSM, lowKey, highKey, prefixLowKey, prefixHighKey);
+        runTest(fieldSerdes, 2, BTreeLeafFrameType.FIELD_PREFIX_COMPRESSED_NSM, lowKey, highKey, prefixLowKey, prefixHighKey);
+    }
+    
+    @Test
+    public void twoIntKeysAndValues() throws Exception {  
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("BTree " + getTestOpName() + " Test With Two Int Keys And Values.");
+        }
+        
+        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+        
+        // Range search in [-100 -100, 100 100]
+        ITupleReference lowKey = TupleUtils.createIntegerTuple(-100, -100);
+        ITupleReference highKey = TupleUtils.createIntegerTuple(100, 100);
+        
+        // Prefix range search in [50, 50]
+        ITupleReference prefixLowKey = TupleUtils.createIntegerTuple(50);
+        ITupleReference prefixHighKey = TupleUtils.createIntegerTuple(50);
+        
+        runTest(fieldSerdes, 2, BTreeLeafFrameType.REGULAR_NSM, lowKey, highKey, prefixLowKey, prefixHighKey);
+        runTest(fieldSerdes, 2, BTreeLeafFrameType.FIELD_PREFIX_COMPRESSED_NSM, lowKey, highKey, prefixLowKey, prefixHighKey);
+    }        
+    
+    @Test
+    public void oneStringKeyAndValue() throws Exception {        
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("BTree " + getTestOpName() + " Test With One String Key And Value.");
+        }
+        
+        ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE };
+        
+        // Range search in ["cbf", "cc7"]
+        ITupleReference lowKey = TupleUtils.createTuple(fieldSerdes, "cbf");
+        ITupleReference highKey = TupleUtils.createTuple(fieldSerdes, "cc7");
+        
+        runTest(fieldSerdes, 1, BTreeLeafFrameType.REGULAR_NSM, lowKey, highKey, null, null);
+        runTest(fieldSerdes, 1, BTreeLeafFrameType.FIELD_PREFIX_COMPRESSED_NSM, lowKey, highKey, null, null);
+    }
+    
+    @Test
+    public void twoStringKeys() throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("BTree " + getTestOpName() + " Test With Two String Keys.");
+        }
+        
+        ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE };
+        
+        // Range search in ["cbf" "ddd", "cc7" "eee"]
+        ITupleReference lowKey = TupleUtils.createTuple(fieldSerdes, "cbf", "ddd");
+        ITupleReference highKey = TupleUtils.createTuple(fieldSerdes, "cc7", "eee");
+        
+        // Prefix range search in ["cbf", cc7"]
+        ITupleReference prefixLowKey = TupleUtils.createTuple(fieldSerdes, "cbf");
+        ITupleReference prefixHighKey = TupleUtils.createTuple(fieldSerdes, "cc7");
+        
+        runTest(fieldSerdes, 2, BTreeLeafFrameType.REGULAR_NSM, lowKey, highKey, prefixLowKey, prefixHighKey);
+        runTest(fieldSerdes, 2, BTreeLeafFrameType.FIELD_PREFIX_COMPRESSED_NSM, lowKey, highKey, prefixLowKey, prefixHighKey);
+    }
+    
+    @Test
+    public void twoStringKeysAndValues() throws Exception {      
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("BTree " + getTestOpName() + " Test With Two String Keys And Values.");
+        }
+        
+        ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE };
+        
+        // Range search in ["cbf", "ddd", cc7", "eee"]
+        ITupleReference lowKey = TupleUtils.createTuple(fieldSerdes, "cbf", "ddd");
+        ITupleReference highKey = TupleUtils.createTuple(fieldSerdes, "cc7", "eee");
+        
+        // Prefix range search in ["cbf", cc7"]
+        ITupleReference prefixLowKey = TupleUtils.createTuple(fieldSerdes, "cbf");
+        ITupleReference prefixHighKey = TupleUtils.createTuple(fieldSerdes, "cc7");
+        
+        runTest(fieldSerdes, 2, BTreeLeafFrameType.REGULAR_NSM, lowKey, highKey, prefixLowKey, prefixHighKey);
+        runTest(fieldSerdes, 2, BTreeLeafFrameType.FIELD_PREFIX_COMPRESSED_NSM, lowKey, highKey, prefixLowKey, prefixHighKey);
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BulkLoadTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BulkLoadTest.java
new file mode 100644
index 0000000..49d08a3
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BulkLoadTest.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestUtils;
+
+@SuppressWarnings("rawtypes")
+public class BulkLoadTest extends BTreeTestDriver {
+    
+    @Override
+    protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType, ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey) throws Exception {
+        BTreeTestContext testCtx = BTreeTestUtils.createBTreeTestContext(bufferCache, btreeFileId, fieldSerdes, numKeys, leafType);
+
+        // We assume all fieldSerdes are of the same type. Check the first one to determine which field types to generate.
+        if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
+            BTreeTestUtils.bulkLoadIntTuples(testCtx, numTuplesToInsert, rnd);
+        } else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
+            BTreeTestUtils.bulkLoadStringTuples(testCtx, numTuplesToInsert, rnd);
+        }
+
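+        // Verify the bulk-loaded tree with point searches, ordered and disk-order scans,
+        // and range searches against the tuples recorded in the test context.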
+        BTreeTestUtils.checkPointSearches(testCtx);
+        BTreeTestUtils.checkOrderedScan(testCtx);
+        BTreeTestUtils.checkDiskOrderScan(testCtx);
+        BTreeTestUtils.checkRangeSearch(testCtx, lowKey, highKey, true, true);
+        if (prefixLowKey != null && prefixHighKey != null) {
+            BTreeTestUtils.checkRangeSearch(testCtx, prefixLowKey, prefixHighKey, true, true);
+        }
+    }
+
+    @Override
+    protected String getTestOpName() {
+        return "BulkLoad";
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/DeleteTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/DeleteTest.java
new file mode 100644
index 0000000..2157adb
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/DeleteTest.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestUtils;
+
+@SuppressWarnings("rawtypes")
+public class DeleteTest extends BTreeTestDriver {
+    
+    private static final int numInsertRounds = 3;
+    private static final int numDeleteRounds = 3;
+    
+    @Override
+    protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType, ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey) throws Exception {
+        BTreeTestContext testCtx = BTreeTestUtils.createBTreeTestContext(bufferCache, btreeFileId, fieldSerdes, numKeys, leafType);
+        for (int i = 0; i < numInsertRounds; i++) {
+            
+            // We assume all fieldSerdes are of the same type. Check the first one to determine which field types to generate.
+            if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
+                BTreeTestUtils.insertIntTuples(testCtx, numTuplesToInsert, rnd);
+            } else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
+                BTreeTestUtils.insertStringTuples(testCtx, numTuplesToInsert, rnd);
+            }
+            
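+            // Delete the tuples in several rounds so that the checks below run
+            // against progressively smaller trees.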
+            int numTuplesPerDeleteRound = (int) Math.ceil((float) testCtx.checkTuples.size() / (float) numDeleteRounds);
+            for (int j = 0; j < numDeleteRounds; j++) {
+                BTreeTestUtils.deleteTuples(testCtx, numTuplesPerDeleteRound, rnd);
+                
+                BTreeTestUtils.checkPointSearches(testCtx);
+                BTreeTestUtils.checkOrderedScan(testCtx);
+                BTreeTestUtils.checkDiskOrderScan(testCtx);
+                BTreeTestUtils.checkRangeSearch(testCtx, lowKey, highKey, true, true);
+                if (prefixLowKey != null && prefixHighKey != null) {
+                    BTreeTestUtils.checkRangeSearch(testCtx, prefixLowKey, prefixHighKey, true, true);
+                }
+            }
+        }
+    }
+
+    @Override
+    protected String getTestOpName() {
+        return "Delete";
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeFieldPrefixNSMTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/FieldPrefixNSMTest.java
similarity index 64%
rename from hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeFieldPrefixNSMTest.java
rename to hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/FieldPrefixNSMTest.java
index f155b2b..e8aa00a 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeFieldPrefixNSMTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/FieldPrefixNSMTest.java
@@ -16,55 +16,52 @@
 package edu.uci.ics.hyracks.storage.am.btree;
 
 import java.io.DataOutput;
-import java.io.File;
 import java.nio.ByteBuffer;
 import java.util.Random;
+import java.util.logging.Level;
 
 import org.junit.Assert;
 import org.junit.Test;
 
 import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.TypeTrait;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.btree.api.IPrefixSlotManager;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeException;
 import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeException;
-import edu.uci.ics.hyracks.storage.am.btree.impls.FieldPrefixSlotManager;
+import edu.uci.ics.hyracks.storage.am.btree.util.AbstractBTreeTest;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
 import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriter;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexUtils;
 import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
 import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestUtils;
 
-public class BTreeFieldPrefixNSMTest extends AbstractBTreeTest {
+public class FieldPrefixNSMTest extends AbstractBTreeTest {
 
     private static final int PAGE_SIZE = 32768; // 32K
     private static final int NUM_PAGES = 40;
     private static final int MAX_OPEN_FILES = 10;
     private static final int HYRACKS_FRAME_SIZE = 128;
-    private IHyracksStageletContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
 
-    private ITupleReference createTuple(IHyracksStageletContext ctx, int f0, int f1, int f2, boolean print)
+    private ITupleReference createTuple(IHyracksTaskContext ctx, int f0, int f1, int f2, boolean print)
             throws HyracksDataException {
-        if (print)
-            LOGGER.info("CREATING: " + f0 + " " + f1 + " " + f2);
+        if (print) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("CREATING: " + f0 + " " + f1 + " " + f2);
+            }
+        }
 
         ByteBuffer buf = ctx.allocateFrame();
         FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
@@ -97,45 +94,36 @@
     @Test
     public void test01() throws Exception {
 
-        TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
-        IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
-        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
-        FileReference file = new FileReference(new File(fileName));
-        bufferCache.createFile(file);
-        int fileId = fmp.lookupFileId(file);
-        bufferCache.openFile(fileId);
-
         // declare fields
         int fieldCount = 3;
-        ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
-        typeTraits[0] = new TypeTrait(4);
-        typeTraits[1] = new TypeTrait(4);
-        typeTraits[2] = new TypeTrait(4);
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[2] = IntegerPointable.TYPE_TRAITS;
 
         // declare keys
         int keyFieldCount = 3;
         IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
-        cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-        cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-        cmps[2] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-        MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+        cmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
+        cmps[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
+        cmps[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
+        MultiComparator cmp = new MultiComparator(cmps);
 
         // just for printing
-        ISerializerDeserializer[] sers = { IntegerSerializerDeserializer.INSTANCE,
+        ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
                 IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
 
         Random rnd = new Random();
         rnd.setSeed(50);
 
-        ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, 0), false);
+        ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(btreeFileId, 0), false);
         try {
 
-            IPrefixSlotManager slotManager = new FieldPrefixSlotManager();
             ITreeIndexTupleWriter tupleWriter = new TypeAwareTupleWriter(typeTraits);
             BTreeFieldPrefixNSMLeafFrame frame = new BTreeFieldPrefixNSMLeafFrame(tupleWriter);
             frame.setPage(page);
             frame.initBuffer((byte) 0);
-            slotManager.setFrame(frame);
+            frame.setMultiComparator(cmp);
             frame.setPrefixTupleCount(0);
 
             String before = new String();
@@ -151,17 +139,20 @@
             // insert records with random calls to compact and compress
             for (int i = 0; i < numRecords; i++) {
 
-                if ((i + 1) % 100 == 0)
-                    LOGGER.info("INSERTING " + (i + 1) + " / " + numRecords);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    if ((i + 1) % 100 == 0) {
+                        LOGGER.info("INSERTING " + (i + 1) + " / " + numRecords);
+                    }
+                }
 
                 int a = rnd.nextInt() % smallMax;
                 int b = rnd.nextInt() % smallMax;
                 int c = i;
-                
+
                 ITupleReference tuple = createTuple(ctx, a, b, c, false);
                 try {
-                    int targetTupleIndex = frame.findTupleIndex(tuple, cmp);
-                    frame.insert(tuple, cmp, targetTupleIndex);
+                    int targetTupleIndex = frame.findInsertTupleIndex(tuple);
+                    frame.insert(tuple, targetTupleIndex);
                 } catch (BTreeException e) {
                     e.printStackTrace();
                 } catch (Exception e) {
@@ -173,16 +164,16 @@
                 savedFields[i][2] = c;
 
                 if (rnd.nextInt() % compactFreq == 0) {
-                    before = frame.printKeys(cmp, sers);                    
-                    frame.compact(cmp);
-                    after = frame.printKeys(cmp, sers);
+                    before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
+                    frame.compact();
+                    after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
                     Assert.assertEquals(before, after);
                 }
 
                 if (rnd.nextInt() % compressFreq == 0) {
-                    before = frame.printKeys(cmp, sers);
-                    frame.compress(cmp);
-                    after = frame.printKeys(cmp, sers);
+                    before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
+                    frame.compress();
+                    after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
                     Assert.assertEquals(before, after);
                 }
 
@@ -190,27 +181,30 @@
 
             // delete records with random calls to compact and compress
             for (int i = 0; i < numRecords; i++) {
-
-                if ((i + 1) % 100 == 0)
-                    LOGGER.info("DELETING " + (i + 1) + " / " + numRecords);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    if ((i + 1) % 100 == 0) {
+                        LOGGER.info("DELETING " + (i + 1) + " / " + numRecords);
+                    }
+                }
 
                 ITupleReference tuple = createTuple(ctx, savedFields[i][0], savedFields[i][1], savedFields[i][2], false);
                 try {
-                    frame.delete(tuple, cmp, true);
+                    int tupleIndex = frame.findDeleteTupleIndex(tuple);
+                    frame.delete(tuple, tupleIndex);
                 } catch (Exception e) {
                 }
 
                 if (rnd.nextInt() % compactFreq == 0) {
-                    before = frame.printKeys(cmp, sers);
-                    frame.compact(cmp);
-                    after = frame.printKeys(cmp, sers);
+                    before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
+                    frame.compact();
+                    after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
                     Assert.assertEquals(before, after);
                 }
 
                 if (rnd.nextInt() % compressFreq == 0) {
-                    before = frame.printKeys(cmp, sers);
-                    frame.compress(cmp);
-                    after = frame.printKeys(cmp, sers);
+                    before = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
+                    frame.compress();
+                    after = TreeIndexUtils.printFrameTuples(frame, fieldSerdes);
                     Assert.assertEquals(before, after);
                 }
             }
@@ -218,8 +212,21 @@
         } finally {
             bufferCache.unpin(page);
         }
+    }
 
-        bufferCache.closeFile(fileId);
-        bufferCache.close();
+    public int getPageSize() {
+        return PAGE_SIZE;
+    }
+
+    public int getNumPages() {
+        return NUM_PAGES;
+    }
+
+    public int getHyracksFrameSize() {
+        return HYRACKS_FRAME_SIZE;
+    }
+
+    public int getMaxOpenFiles() {
+        return MAX_OPEN_FILES;
     }
 }
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/InsertTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/InsertTest.java
new file mode 100644
index 0000000..fa3f895
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/InsertTest.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestUtils;
+
+/**
+ * Tests the BTree insert operation with string and integer fields using
+ * various numbers of key and payload fields.
+ * 
+ * Each test first fills a BTree with randomly generated tuples.
+ * We compare the following operations against expected results:
+ * 1. Point searches for all tuples.
+ * 2. Ordered scan.
+ * 3. Disk-order scan.
+ * 4. Range search (and prefix search for composite keys).
+ * 
+ */
+@SuppressWarnings("rawtypes")
+public class InsertTest extends BTreeTestDriver {        
+    @Override
+    protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType, ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey) throws Exception {
+        BTreeTestContext testCtx = BTreeTestUtils.createBTreeTestContext(bufferCache, btreeFileId, fieldSerdes, numKeys, leafType);
+        // We assume all fieldSerdes are of the same type. Check the first one to determine which field types to generate.
+        if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
+            BTreeTestUtils.insertIntTuples(testCtx, numTuplesToInsert, rnd);
+        } else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
+            BTreeTestUtils.insertStringTuples(testCtx, numTuplesToInsert, rnd);
+        }
+        
+        BTreeTestUtils.checkPointSearches(testCtx);
+        BTreeTestUtils.checkOrderedScan(testCtx);
+        BTreeTestUtils.checkDiskOrderScan(testCtx);
+                
+        BTreeTestUtils.checkRangeSearch(testCtx, lowKey, highKey, true, true);
+        if (prefixLowKey != null && prefixHighKey != null) {
+            BTreeTestUtils.checkRangeSearch(testCtx, prefixLowKey, prefixHighKey, true, true);
+        }
+        testCtx.btree.close();
+    }
+
+    @Override
+    protected String getTestOpName() {
+        return "Insert";
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/RangeSearchCursorTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/RangeSearchCursorTest.java
index c9ece57..4031e7f 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/RangeSearchCursorTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/RangeSearchCursorTest.java
@@ -18,126 +18,94 @@
 import java.io.ByteArrayInputStream;
 import java.io.DataInput;
 import java.io.DataInputStream;
-import java.io.DataOutput;
-import java.io.File;
-import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Random;
 import java.util.TreeSet;
+import java.util.logging.Level;
 
 import org.junit.Before;
 import org.junit.Test;
 
-import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.TypeTrait;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
 import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
 import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeException;
 import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
 import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
 import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeException;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
 import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.btree.util.AbstractBTreeTest;
 import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
 import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestUtils;
+import edu.uci.ics.hyracks.storage.am.common.util.IndexUtils;
 
 public class RangeSearchCursorTest extends AbstractBTreeTest {
-    private static final int PAGE_SIZE = 256;
-    private static final int NUM_PAGES = 10;
-    private static final int MAX_OPEN_FILES = 10;
-    private static final int HYRACKS_FRAME_SIZE = 128;
-
-    // declare fields
+    // Declare fields
     int fieldCount = 2;
-    ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+    ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
 
     TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
-    ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
-    ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
     ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
-
-    IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
-    IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
     ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
 
-    IHyracksStageletContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
-    ByteBuffer frame = ctx.allocateFrame();
-    FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-
-    ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
-            IntegerSerializerDeserializer.INSTANCE };
-    RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
-    IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
-    FrameTupleReference tuple = new FrameTupleReference();
-
     Random rnd = new Random(50);
 
     @Before
-    public void setUp() {
-        typeTraits[0] = new TypeTrait(4);
-        typeTraits[1] = new TypeTrait(4);
-        accessor.reset(frame);
+    public void setUp() throws HyracksDataException {
+        super.setUp();
+        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
     }
 
     @Test
     public void uniqueIndexTest() throws Exception {
-
-        LOGGER.info("TESTING RANGE SEARCH CURSOR ON UNIQUE INDEX");
-
-        TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
-        IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
-        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
-        FileReference file = new FileReference(new File(fileName));
-        bufferCache.createFile(file);
-        int fileId = fmp.lookupFileId(file);
-        bufferCache.openFile(fileId);
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("TESTING RANGE SEARCH CURSOR ON UNIQUE INDEX");
+        }
 
         // declare keys
         int keyFieldCount = 1;
-        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
-        cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+        cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
 
-        MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+        MultiComparator cmp = IndexUtils.createMultiComparator(cmpFactories);
 
-        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
+        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
 
-        BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
-        btree.create(fileId, leafFrame, metaFrame);
-        btree.open(fileId);
+        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
+        IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
 
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
-        DataOutput dos = tb.getDataOutput();
+        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, btreeFileId, 0, metaFrameFactory);
 
-        BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
+        BTree btree = new BTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
+        btree.create(btreeFileId);
+        btree.open(btreeFileId);
+
+        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+
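+        // Inserts now go through an ITreeIndexAccessor instead of an explicit BTreeOpContext.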
+        ITreeIndexAccessor indexAccessor = btree.createAccessor();
 
         // generate keys
         int numKeys = 50;
@@ -155,19 +123,11 @@
         // insert keys into btree
         for (int i = 0; i < keys.size(); i++) {
 
-            tb.reset();
-            IntegerSerializerDeserializer.INSTANCE.serialize(keys.get(i).intValue(), dos);
-            tb.addFieldEndOffset();
-            IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
-            tb.addFieldEndOffset();
-
-            appender.reset(frame, true);
-            appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
-            tuple.reset(accessor, 0);
+            TupleUtils.createIntegerTuple(tupleBuilder, tuple, keys.get(i), i);
+            tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
 
             try {
-                btree.insert(tuple, insertOpCtx);
+                indexAccessor.insert(tuple);
             } catch (BTreeException e) {
             } catch (Exception e) {
                 e.printStackTrace();
@@ -192,41 +152,38 @@
         performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, true, false);
 
         btree.close();
-        bufferCache.closeFile(fileId);
-        bufferCache.close();
     }
 
     @Test
     public void nonUniqueIndexTest() throws Exception {
-
-        LOGGER.info("TESTING RANGE SEARCH CURSOR ON NONUNIQUE INDEX");
-
-        TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
-        IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
-        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
-        FileReference file = new FileReference(new File(fileName));
-        bufferCache.createFile(file);
-        int fileId = fmp.lookupFileId(file);
-        bufferCache.openFile(fileId);
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("TESTING RANGE SEARCH CURSOR ON NONUNIQUE INDEX");
+        }
 
         // declare keys
         int keyFieldCount = 2;
-        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
-        cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-        cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+        cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+        cmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
 
-        MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+        MultiComparator cmp = IndexUtils.createMultiComparator(cmpFactories);
 
-        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
+        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
 
-        BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
-        btree.create(fileId, leafFrame, metaFrame);
-        btree.open(fileId);
+        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
+        IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
 
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
-        DataOutput dos = tb.getDataOutput();
+        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, btreeFileId, 0, metaFrameFactory);
 
-        BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
+        BTree btree = new BTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
+        btree.create(btreeFileId);
+        btree.open(btreeFileId);
+
+        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+
+        ITreeIndexAccessor indexAccessor = btree.createAccessor();
 
         // generate keys
         int numKeys = 50;
@@ -241,19 +198,11 @@
         // insert keys into btree
         for (int i = 0; i < keys.size(); i++) {
 
-            tb.reset();
-            IntegerSerializerDeserializer.INSTANCE.serialize(keys.get(i).intValue(), dos);
-            tb.addFieldEndOffset();
-            IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
-            tb.addFieldEndOffset();
-
-            appender.reset(frame, true);
-            appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
-            tuple.reset(accessor, 0);
+            TupleUtils.createIntegerTuple(tupleBuilder, tuple, keys.get(i), i);
+            tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
 
             try {
-                btree.insert(tuple, insertOpCtx);
+                indexAccessor.insert(tuple);
             } catch (BTreeException e) {
             } catch (Exception e) {
                 e.printStackTrace();
@@ -278,44 +227,38 @@
         performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, true, false);
 
         btree.close();
-        bufferCache.closeFile(fileId);
-        bufferCache.close();
     }
 
     @Test
     public void nonUniqueFieldPrefixIndexTest() throws Exception {
-
-        LOGGER.info("TESTING RANGE SEARCH CURSOR ON NONUNIQUE FIELD-PREFIX COMPRESSED INDEX");
-
-        ITreeIndexFrameFactory leafFrameFactory = new BTreeFieldPrefixNSMLeafFrameFactory(tupleWriterFactory);
-        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
-
-        TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
-        IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
-        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
-        FileReference file = new FileReference(new File(fileName));
-        bufferCache.createFile(file);
-        int fileId = fmp.lookupFileId(file);
-        bufferCache.openFile(fileId);
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("TESTING RANGE SEARCH CURSOR ON NONUNIQUE FIELD-PREFIX COMPRESSED INDEX");
+        }
 
         // declare keys
         int keyFieldCount = 2;
-        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
-        cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-        cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+        cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+        cmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
 
-        MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+        MultiComparator cmp = IndexUtils.createMultiComparator(cmpFactories);
 
-        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
+        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
 
-        BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
-        btree.create(fileId, leafFrame, metaFrame);
-        btree.open(fileId);
+        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
+        IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
 
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
-        DataOutput dos = tb.getDataOutput();
+        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, btreeFileId, 0, metaFrameFactory);
 
-        BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
+        BTree btree = new BTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
+        btree.create(btreeFileId);
+        btree.open(btreeFileId);
+
+        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+
+        ITreeIndexAccessor indexAccessor = btree.createAccessor();
 
         // generate keys
         int numKeys = 50;
@@ -330,19 +273,11 @@
         // insert keys into btree
         for (int i = 0; i < keys.size(); i++) {
 
-            tb.reset();
-            IntegerSerializerDeserializer.INSTANCE.serialize(keys.get(i).intValue(), dos);
-            tb.addFieldEndOffset();
-            IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
-            tb.addFieldEndOffset();
-
-            appender.reset(frame, true);
-            appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
-            tuple.reset(accessor, 0);
+            TupleUtils.createIntegerTuple(tupleBuilder, tuple, keys.get(i), i);
+            tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
 
             try {
-                btree.insert(tuple, insertOpCtx);
+                indexAccessor.insert(tuple);
             } catch (BTreeException e) {
             } catch (Exception e) {
                 e.printStackTrace();
@@ -367,45 +302,18 @@
         performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, true, false);
 
         btree.close();
-        bufferCache.closeFile(fileId);
-        bufferCache.close();
     }
 
     public RangePredicate createRangePredicate(int lk, int hk, boolean isForward, boolean lowKeyInclusive,
-            boolean highKeyInclusive, MultiComparator cmp, ITypeTrait[] typeTraits) throws HyracksDataException {
-        // build low and high keys
-        ArrayTupleBuilder ktb = new ArrayTupleBuilder(1);
-        DataOutput kdos = ktb.getDataOutput();
-
-        ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE };
-        RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
-        IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx.getFrameSize(), keyDesc);
-        keyAccessor.reset(frame);
-
-        appender.reset(frame, true);
-
-        // build and append low key
-        ktb.reset();
-        IntegerSerializerDeserializer.INSTANCE.serialize(lk, kdos);
-        ktb.addFieldEndOffset();
-        appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
-
-        // build and append high key
-        ktb.reset();
-        IntegerSerializerDeserializer.INSTANCE.serialize(hk, kdos);
-        ktb.addFieldEndOffset();
-        appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
+            boolean highKeyInclusive, MultiComparator cmp) throws HyracksDataException {
 
         // create tuplereferences for search keys
-        FrameTupleReference lowKey = new FrameTupleReference();
-        lowKey.reset(keyAccessor, 0);
-
-        FrameTupleReference highKey = new FrameTupleReference();
-        highKey.reset(keyAccessor, 1);
+        ITupleReference lowKey = TupleUtils.createIntegerTuple(lk);
+        ITupleReference highKey = TupleUtils.createIntegerTuple(hk);
 
         IBinaryComparator[] searchCmps = new IBinaryComparator[1];
-        searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-        MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
+        searchCmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
+        MultiComparator searchCmp = new MultiComparator(searchCmps);
 
         RangePredicate rangePred = new RangePredicate(isForward, lowKey, highKey, lowKeyInclusive, highKeyInclusive,
                 searchCmp, searchCmp);
@@ -464,11 +372,11 @@
                 int lowKey = i;
                 int highKey = j;
 
-                ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor(leafFrame);
+                ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor(leafFrame, false);
                 RangePredicate rangePred = createRangePredicate(lowKey, highKey, isForward, lowKeyInclusive,
-                        highKeyInclusive, btree.getMultiComparator(), btree.getMultiComparator().getTypeTraits());
-                BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH, leafFrame, interiorFrame, null);
-                btree.search(rangeCursor, rangePred, searchOpCtx);
+                        highKeyInclusive, btree.getMultiComparator());
+                ITreeIndexAccessor indexAccessor = btree.createAccessor();
+                indexAccessor.search(rangeCursor, rangePred);
 
                 try {
                     while (rangeCursor.hasNext()) {
@@ -502,27 +410,35 @@
                         else
                             u = ')';
 
-                        LOGGER.info("RANGE: " + l + " " + lowKey + " , " + highKey + " " + u);
+                        if (LOGGER.isLoggable(Level.INFO)) {
+                            LOGGER.info("RANGE: " + l + " " + lowKey + " , " + highKey + " " + u);
+                        }
                         StringBuilder strBuilder = new StringBuilder();
                         for (Integer r : expectedResults) {
                             strBuilder.append(r + " ");
                         }
-                        LOGGER.info(strBuilder.toString());
+                        if (LOGGER.isLoggable(Level.INFO)) {
+                            LOGGER.info(strBuilder.toString());
+                        }
                     }
                 }
 
                 if (results.size() == expectedResults.size()) {
                     for (int k = 0; k < results.size(); k++) {
                         if (!results.get(k).equals(expectedResults.get(k))) {
-                            LOGGER.info("DIFFERENT RESULTS AT: i=" + i + " j=" + j + " k=" + k);
-                            LOGGER.info(results.get(k) + " " + expectedResults.get(k));
+                            if (LOGGER.isLoggable(Level.INFO)) {
+                                LOGGER.info("DIFFERENT RESULTS AT: i=" + i + " j=" + j + " k=" + k);
+                                LOGGER.info(results.get(k) + " " + expectedResults.get(k));
+                            }
                             return false;
                         }
                     }
                 } else {
-                    LOGGER.info("UNEQUAL NUMBER OF RESULTS AT: i=" + i + " j=" + j);
-                    LOGGER.info("RESULTS: " + results.size());
-                    LOGGER.info("EXPECTED RESULTS: " + expectedResults.size());
+                    if (LOGGER.isLoggable(Level.INFO)) {
+                        LOGGER.info("UNEQUAL NUMBER OF RESULTS AT: i=" + i + " j=" + j);
+                        LOGGER.info("RESULTS: " + results.size());
+                        LOGGER.info("EXPECTED RESULTS: " + expectedResults.size());
+                    }
                     return false;
                 }
             }
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeStatsTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StatsTest.java
similarity index 73%
rename from hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeStatsTest.java
rename to hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StatsTest.java
index dcfb7a2..1ae80d6 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeStatsTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StatsTest.java
@@ -4,48 +4,50 @@
 import java.io.File;
 import java.nio.ByteBuffer;
 import java.util.Random;
+import java.util.logging.Level;
 
 import org.junit.Test;
 
 import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.TypeTrait;
 import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
 import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
 import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
 import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
 import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
 import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext;
+import edu.uci.ics.hyracks.storage.am.btree.util.AbstractBTreeTest;
 import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
 import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
 import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.am.common.utility.TreeIndexBufferCacheWarmup;
-import edu.uci.ics.hyracks.storage.am.common.utility.TreeIndexStats;
-import edu.uci.ics.hyracks.storage.am.common.utility.TreeIndexStatsGatherer;
+import edu.uci.ics.hyracks.storage.am.common.util.IndexUtils;
+import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexBufferCacheWarmup;
+import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexStats;
+import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexStatsGatherer;
 import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
 import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
 import edu.uci.ics.hyracks.test.support.TestUtils;
 
-public class BTreeStatsTest extends AbstractBTreeTest {
+public class StatsTest extends AbstractBTreeTest {
 
     // private static final int PAGE_SIZE = 256;
     // private static final int NUM_PAGES = 10;
@@ -54,7 +56,7 @@
     private static final int NUM_PAGES = 1000;
     private static final int MAX_OPEN_FILES = 10;
     private static final int HYRACKS_FRAME_SIZE = 128;
-    private IHyracksStageletContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
+    private IHyracksTaskContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
 
     @Test
     public void test01() throws Exception {
@@ -69,16 +71,16 @@
 
         // declare fields
         int fieldCount = 2;
-        ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
-        typeTraits[0] = new TypeTrait(4);
-        typeTraits[1] = new TypeTrait(4);
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
 
         // declare keys
         int keyFieldCount = 1;
-        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
-        cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+        cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
 
-        MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+        MultiComparator cmp = IndexUtils.createMultiComparator(cmpFactories);
 
         TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
         ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
@@ -91,8 +93,8 @@
 
         IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
 
-        BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
-        btree.create(fileId, leafFrame, metaFrame);
+        BTree btree = new BTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
+        btree.create(fileId);
         btree.open(fileId);
 
         Random rnd = new Random();
@@ -100,11 +102,13 @@
 
         long start = System.currentTimeMillis();
 
-        LOGGER.info("INSERTING INTO TREE");
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("INSERTING INTO TREE");
+        }
 
         ByteBuffer frame = ctx.allocateFrame();
         FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
         DataOutput dos = tb.getDataOutput();
 
         ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
@@ -114,8 +118,7 @@
         accessor.reset(frame);
         FrameTupleReference tuple = new FrameTupleReference();
 
-        BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
-
+        ITreeIndexAccessor indexAccessor = btree.createAccessor();
         // 10000
         for (int i = 0; i < 100000; i++) {
 
@@ -133,13 +136,15 @@
 
             tuple.reset(accessor, 0);
 
-            if (i % 10000 == 0) {
-                long end = System.currentTimeMillis();
-                LOGGER.info("INSERTING " + i + " : " + f0 + " " + f1 + " " + (end - start));
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if (i % 10000 == 0) {
+                    long end = System.currentTimeMillis();
+                    LOGGER.info("INSERTING " + i + " : " + f0 + " " + f1 + " " + (end - start));
+                }
             }
 
             try {
-                btree.insert(tuple, insertOpCtx);
+                indexAccessor.insert(tuple);
             } catch (TreeIndexException e) {
             } catch (Exception e) {
                 e.printStackTrace();
@@ -149,7 +154,9 @@
         TreeIndexStatsGatherer statsGatherer = new TreeIndexStatsGatherer(bufferCache, freePageManager, fileId,
                 btree.getRootPageId());
         TreeIndexStats stats = statsGatherer.gatherStats(leafFrame, interiorFrame, metaFrame);
-        LOGGER.info(stats.toString());
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("\n" + stats.toString());
+        }
 
         TreeIndexBufferCacheWarmup bufferCacheWarmup = new TreeIndexBufferCacheWarmup(bufferCache, freePageManager,
                 fileId);
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StorageManagerTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StorageManagerTest.java
index 8b6185d..ac4133d 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StorageManagerTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StorageManagerTest.java
@@ -19,27 +19,25 @@
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Random;
+import java.util.logging.Level;
 
 import org.junit.Test;
 
-import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.storage.am.btree.AbstractBTreeTest;
+import edu.uci.ics.hyracks.storage.am.btree.util.AbstractBTreeTest;
 import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
 import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
 import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
 import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
 import edu.uci.ics.hyracks.storage.common.sync.LatchType;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestUtils;
 
 public class StorageManagerTest extends AbstractBTreeTest {
     private static final int PAGE_SIZE = 256;
     private static final int NUM_PAGES = 10;
     private static final int MAX_OPEN_FILES = 10;
-    private static final int HYRACKS_FRAME_SIZE = 128;
-    private IHyracksStageletContext ctx = TestUtils.create(32768);
+    private static final int HYRACKS_FRAME_SIZE = 32768;
 
     public class PinnedLatchedPage {
         public final ICachedPage page;
@@ -88,7 +86,9 @@
         private void pinRandomPage() {
             int pageId = Math.abs(rnd.nextInt() % maxPages);
 
-            LOGGER.info(workerId + " PINNING PAGE: " + pageId);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info(workerId + " PINNING PAGE: " + pageId);
+            }
 
             try {
                 ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
@@ -102,14 +102,18 @@
                         break;
 
                     case FTA_READONLY: {
-                        LOGGER.info(workerId + " S LATCHING: " + pageId);
+                        if (LOGGER.isLoggable(Level.INFO)) {
+                            LOGGER.info(workerId + " S LATCHING: " + pageId);
+                        }
                         page.acquireReadLatch();
                         latch = LatchType.LATCH_S;
                     }
                         break;
 
                     case FTA_WRITEONLY: {
-                        LOGGER.info(workerId + " X LATCHING: " + pageId);
+                        if (LOGGER.isLoggable(Level.INFO)) {
+                            LOGGER.info(workerId + " X LATCHING: " + pageId);
+                        }
                         page.acquireWriteLatch();
                         latch = LatchType.LATCH_X;
                     }
@@ -117,11 +121,15 @@
 
                     case FTA_MIXED: {
                         if (rnd.nextInt() % 2 == 0) {
-                            LOGGER.info(workerId + " S LATCHING: " + pageId);
+                            if (LOGGER.isLoggable(Level.INFO)) {
+                                LOGGER.info(workerId + " S LATCHING: " + pageId);
+                            }
                             page.acquireReadLatch();
                             latch = LatchType.LATCH_S;
                         } else {
-                            LOGGER.info(workerId + " X LATCHING: " + pageId);
+                            if (LOGGER.isLoggable(Level.INFO)) {
+                                LOGGER.info(workerId + " X LATCHING: " + pageId);
+                            }
                             page.acquireWriteLatch();
                             latch = LatchType.LATCH_X;
                         }
@@ -144,14 +152,20 @@
 
                 if (plPage.latch != null) {
                     if (plPage.latch == LatchType.LATCH_S) {
-                        LOGGER.info(workerId + " S UNLATCHING: " + plPage.pageId);
+                        if (LOGGER.isLoggable(Level.INFO)) {
+                            LOGGER.info(workerId + " S UNLATCHING: " + plPage.pageId);
+                        }
                         plPage.page.releaseReadLatch();
                     } else {
-                        LOGGER.info(workerId + " X UNLATCHING: " + plPage.pageId);
+                        if (LOGGER.isLoggable(Level.INFO)) {
+                            LOGGER.info(workerId + " X UNLATCHING: " + plPage.pageId);
+                        }
                         plPage.page.releaseWriteLatch();
                     }
                 }
-                LOGGER.info(workerId + " UNPINNING PAGE: " + plPage.pageId);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info(workerId + " UNPINNING PAGE: " + plPage.pageId);
+                }
 
                 bufferCache.unpin(plPage.page);
                 pinnedPages.remove(index);
@@ -161,7 +175,9 @@
         }
 
         private void openFile() {
-            LOGGER.info(workerId + " OPENING FILE: " + fileId);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info(workerId + " OPENING FILE: " + fileId);
+            }
             try {
                 bufferCache.openFile(fileId);
                 fileIsOpen = true;
@@ -171,7 +187,9 @@
         }
 
         private void closeFile() {
-            LOGGER.info(workerId + " CLOSING FILE: " + fileId);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info(workerId + " CLOSING FILE: " + fileId);
+            }
             try {
                 bufferCache.closeFile(fileId);
                 fileIsOpen = false;
@@ -188,7 +206,9 @@
             while (loopCount < maxLoopCount) {
                 loopCount++;
 
-                LOGGER.info(workerId + " LOOP: " + loopCount + "/" + maxLoopCount);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info(workerId + " LOOP: " + loopCount + "/" + maxLoopCount);
+                }
 
                 if (fileIsOpen) {
 
@@ -256,4 +276,20 @@
 
         bufferCache.close();
     }
+    
+    public int getPageSize() {
+        return PAGE_SIZE;
+    }
+    
+    public int getNumPages() {
+        return NUM_PAGES;
+    }
+    
+    public int getHyracksFrameSize() {
+        return HYRACKS_FRAME_SIZE;
+    }
+    
+    public int getMaxOpenFiles() {
+        return MAX_OPEN_FILES;
+    }
 }
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/UpdateSearchTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/UpdateSearchTest.java
new file mode 100644
index 0000000..5d43ae1
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/UpdateSearchTest.java
@@ -0,0 +1,152 @@
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.util.Random;
+import java.util.logging.Level;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.btree.util.AbstractBTreeTest;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.common.util.IndexUtils;
+
+public class UpdateSearchTest extends AbstractBTreeTest {
+
+    // Update scan test on fixed-length tuples.
+    @Test
+    public void test01() throws Exception {
+        // declare fields
+        int fieldCount = 2;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+
+        // declare keys
+        int keyFieldCount = 1;
+        IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+        cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+        MultiComparator cmp = IndexUtils.createMultiComparator(cmpFactories);
+
+        ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE };
+
+        TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
+        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+
+        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
+
+        IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, btreeFileId, 0, metaFrameFactory);
+        BTree btree = new BTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
+        btree.create(btreeFileId);
+        btree.open(btreeFileId);
+
+        Random rnd = new Random();
+        rnd.setSeed(50);
+
+        long start = System.currentTimeMillis();
+
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("INSERTING INTO TREE");
+        }
+
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference insertTuple = new ArrayTupleReference();
+        ITreeIndexAccessor indexAccessor = btree.createAccessor();
+
+        int numInserts = 10000;
+        for (int i = 0; i < numInserts; i++) {
+            int f0 = rnd.nextInt() % 10000;
+            int f1 = 5;
+            TupleUtils.createIntegerTuple(tb, insertTuple, f0, f1);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if (i % 10000 == 0) {
+                    long end = System.currentTimeMillis();
+                    LOGGER.info("INSERTING " + i + " : " + f0 + " " + f1 + " " + (end - start));
+                }
+            }
+
+            try {
+                indexAccessor.insert(insertTuple);
+            } catch (TreeIndexException e) { // Ignore duplicate key insertions.
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+        long end = System.currentTimeMillis();
+        long duration = end - start;
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("DURATION: " + duration);
+        }
+
+        // Update scan.
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("UPDATE SCAN:");
+        }
+        // Set the cursor to X latch nodes.
+        ITreeIndexCursor updateScanCursor = new BTreeRangeSearchCursor(leafFrame, true);
+        RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+        indexAccessor.search(updateScanCursor, nullPred);
+        try {
+            while (updateScanCursor.hasNext()) {
+                updateScanCursor.next();
+                ITupleReference tuple = updateScanCursor.getTuple();
+                // Change the value field.
+                IntegerSerializerDeserializer.putInt(10, tuple.getFieldData(1), tuple.getFieldStart(1));
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        } finally {
+            updateScanCursor.close();
+        }
+
+        // Ordered scan to verify the values.
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("ORDERED SCAN:");
+        }
+        // Set the cursor to X latch nodes.
+        ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(leafFrame, true);
+        indexAccessor.search(scanCursor, nullPred);
+        try {
+            while (scanCursor.hasNext()) {
+                scanCursor.next();
+                ITupleReference tuple = scanCursor.getTuple();
+                String rec = TupleUtils.printTuple(tuple, recDescSers);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info(rec);
+                }
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        } finally {
+            scanCursor.close();
+        }
+        btree.close();
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/UpdateTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/UpdateTest.java
new file mode 100644
index 0000000..b40539f
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/UpdateTest.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestUtils;
+
+@SuppressWarnings("rawtypes")
+public class UpdateTest extends BTreeTestDriver {
+    private static final int numUpdateRounds = 3;
+    
+    @Override
+    protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType, ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey) throws Exception {
+        // This is a noop because we can only update non-key fields.
+        if (fieldSerdes.length == numKeys) {
+            return;
+        }
+        
+        BTreeTestContext testCtx = BTreeTestUtils.createBTreeTestContext(bufferCache, btreeFileId, fieldSerdes, numKeys, leafType);
+
+        // We assume all fieldSerdes are of the same type. Check the first one to determine which field types to generate.
+        if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
+            BTreeTestUtils.insertIntTuples(testCtx, numTuplesToInsert, rnd);
+        } else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
+            BTreeTestUtils.insertStringTuples(testCtx, numTuplesToInsert, rnd);
+        }
+
+        int numTuplesPerUpdateRound = (int)Math.ceil((float)testCtx.checkTuples.size() / (float)numUpdateRounds);
+        for (int j = 0; j < numUpdateRounds; j++) {
+            BTreeTestUtils.updateTuples(testCtx, numTuplesPerUpdateRound, rnd);
+
+            BTreeTestUtils.checkPointSearches(testCtx);
+            BTreeTestUtils.checkOrderedScan(testCtx);
+            BTreeTestUtils.checkDiskOrderScan(testCtx);
+            BTreeTestUtils.checkRangeSearch(testCtx, lowKey, highKey, true, true);
+            if (prefixLowKey != null && prefixHighKey != null) {
+                BTreeTestUtils.checkRangeSearch(testCtx, prefixLowKey, prefixHighKey, true, true);
+            }
+        }
+    }
+
+    @Override
+    protected String getTestOpName() {
+        return "Update";
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/AbstractBTreeTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/AbstractBTreeTest.java
new file mode 100644
index 0000000..9630a1b
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/AbstractBTreeTest.java
@@ -0,0 +1,76 @@
+package edu.uci.ics.hyracks.storage.am.btree.util;
+
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.Random;
+import java.util.logging.Logger;
+
+import org.junit.After;
+import org.junit.Before;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestUtils;
+
+public abstract class AbstractBTreeTest {
+    protected static final Logger LOGGER = Logger.getLogger(AbstractBTreeTest.class.getName());
+    public static final long RANDOM_SEED = 50;
+    
+    private static final int PAGE_SIZE = 256;
+    private static final int NUM_PAGES = 10;
+    private static final int MAX_OPEN_FILES = 10;
+    private static final int HYRACKS_FRAME_SIZE = 128;
+        
+    protected IHyracksTaskContext ctx; 
+    protected IBufferCache bufferCache;
+    protected int btreeFileId;
+    
+    protected final Random rnd = new Random();
+    protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
+    protected final static String tmpDir = System.getProperty("java.io.tmpdir");
+    protected final static String sep = System.getProperty("file.separator");
+    protected String fileName;   
+    
+    @Before
+    public void setUp() throws HyracksDataException {
+        fileName = tmpDir + sep + simpleDateFormat.format(new Date());
+        ctx = TestUtils.create(getHyracksFrameSize());
+        TestStorageManagerComponentHolder.init(getPageSize(), getNumPages(), getMaxOpenFiles());
+        bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+        FileReference file = new FileReference(new File(fileName));
+        bufferCache.createFile(file);
+        btreeFileId = fmp.lookupFileId(file);
+        bufferCache.openFile(btreeFileId);
+        rnd.setSeed(RANDOM_SEED);
+    }
+    
+    @After
+    public void tearDown() throws HyracksDataException {
+        bufferCache.closeFile(btreeFileId);
+        bufferCache.close();
+        File f = new File(fileName);
+        f.deleteOnExit();
+    }
+    
+    public int getPageSize() {
+        return PAGE_SIZE;
+    }
+    
+    public int getNumPages() {
+        return NUM_PAGES;
+    }
+    
+    public int getHyracksFrameSize() {
+        return HYRACKS_FRAME_SIZE;
+    }
+    
+    public int getMaxOpenFiles() {
+        return MAX_OPEN_FILES;
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestContext.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestContext.java
new file mode 100644
index 0000000..f1b03c1
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestContext.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.util;
+
+import java.util.TreeSet;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+
+@SuppressWarnings("rawtypes")
+public final class BTreeTestContext {
+    public final ISerializerDeserializer[] fieldSerdes;
+    public final IBufferCache bufferCache;
+    public final BTree btree;
+    public final IBTreeLeafFrame leafFrame;
+    public final IBTreeInteriorFrame interiorFrame;
+    public final ITreeIndexMetaDataFrame metaFrame;
+    public final ArrayTupleBuilder tupleBuilder;
+    public final ArrayTupleReference tuple = new ArrayTupleReference();
+    public final TreeSet<CheckTuple> checkTuples = new TreeSet<CheckTuple>();
+    public final ITreeIndexAccessor indexAccessor;
+
+    public BTreeTestContext(IBufferCache bufferCache, ISerializerDeserializer[] fieldSerdes, BTree btree,
+            IBTreeLeafFrame leafFrame, IBTreeInteriorFrame interiorFrame, ITreeIndexMetaDataFrame metaFrame,
+            ITreeIndexAccessor indexAccessor) {
+        this.bufferCache = bufferCache;
+        this.fieldSerdes = fieldSerdes;
+        this.btree = btree;
+        this.leafFrame = leafFrame;
+        this.interiorFrame = interiorFrame;
+        this.metaFrame = metaFrame;
+        this.indexAccessor = indexAccessor;
+        this.tupleBuilder = new ArrayTupleBuilder(fieldSerdes.length);
+    }
+
+    public int getFieldCount() {
+        return fieldSerdes.length;
+    }
+
+    public int getKeyFieldCount() {
+        return btree.getMultiComparator().getKeyFieldCount();
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestUtils.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestUtils.java
new file mode 100644
index 0000000..b5186b0
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestUtils.java
@@ -0,0 +1,505 @@
+package edu.uci.ics.hyracks.storage.am.btree.util;
+
+import static org.junit.Assert.fail;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.util.Iterator;
+import java.util.NavigableSet;
+import java.util.Random;
+import java.util.TreeSet;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.SerdeUtils;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeDuplicateKeyException;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.PageAllocationException;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+
+@SuppressWarnings("rawtypes")
+public class BTreeTestUtils {
+    private static final Logger LOGGER = Logger.getLogger(BTreeTestUtils.class.getName());    
+    
+    public static BTreeTestContext createBTreeTestContext(IBufferCache bufferCache, int btreeFileId, ISerializerDeserializer[] fieldSerdes, int numKeyFields, BTreeLeafFrameType leafType) throws Exception {        
+        ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes, fieldSerdes.length);
+        IBinaryComparator[] cmps = SerdeUtils.serdesToComparators(fieldSerdes, numKeyFields);
+        
+        BTree btree = BTreeUtils.createBTree(bufferCache, btreeFileId, typeTraits, cmps, leafType);
+        btree.create(btreeFileId);
+        btree.open(btreeFileId);
+        ITreeIndexAccessor indexAccessor = btree.createAccessor();
+        
+        IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) btree.getLeafFrameFactory().createFrame();
+        IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) btree.getInteriorFrameFactory().createFrame();
+        ITreeIndexMetaDataFrame metaFrame = btree.getFreePageManager().getMetaDataFrameFactory().createFrame();
+        BTreeTestContext testCtx = new BTreeTestContext(bufferCache, fieldSerdes, btree, leafFrame, interiorFrame, metaFrame, indexAccessor);
+        return testCtx;
+    }
+    
+    private static void compareActualAndExpected(ITupleReference actual, CheckTuple expected, ISerializerDeserializer[] fieldSerdes) throws HyracksDataException {
+        for (int i = 0; i < fieldSerdes.length; i++) {
+            ByteArrayInputStream inStream = new ByteArrayInputStream(
+                    actual.getFieldData(i), actual.getFieldStart(i),
+                    actual.getFieldLength(i));
+            DataInput dataIn = new DataInputStream(inStream);
+            Object actualObj = fieldSerdes[i].deserialize(dataIn);            
+            if (!actualObj.equals(expected.get(i))) {
+                fail("Actual and expected fields do not match.\nExpected: " + expected.get(i) + "\nActual  : " + actualObj);
+            }
+        }
+    }
+    
+    @SuppressWarnings("unchecked")
+    private static CheckTuple createCheckTupleFromTuple(ITupleReference tuple, ISerializerDeserializer[] fieldSerdes, int numKeys) throws HyracksDataException {
+        CheckTuple checkTuple = new CheckTuple(fieldSerdes.length, numKeys);
+        int fieldCount = Math.min(fieldSerdes.length, tuple.getFieldCount());
+        for (int i = 0; i < fieldCount; i++) {
+            ByteArrayInputStream inStream = new ByteArrayInputStream(
+                    tuple.getFieldData(i), tuple.getFieldStart(i),
+                    tuple.getFieldLength(i));
+            DataInput dataIn = new DataInputStream(inStream);
+            Comparable fieldObj = (Comparable)fieldSerdes[i].deserialize(dataIn);
+            checkTuple.add(fieldObj);
+        }
+        return checkTuple;
+    }
+    
+    @SuppressWarnings("unchecked")
+    private static void createTupleFromCheckTuple(CheckTuple checkTuple, ArrayTupleBuilder tupleBuilder, ArrayTupleReference tuple, ISerializerDeserializer[] fieldSerdes) throws HyracksDataException {
+        int fieldCount = tupleBuilder.getFieldEndOffsets().length; 
+        DataOutput dos = tupleBuilder.getDataOutput();
+        tupleBuilder.reset();
+        for (int i = 0; i < fieldCount; i++) {
+            fieldSerdes[i].serialize(checkTuple.get(i), dos);
+            tupleBuilder.addFieldEndOffset();
+        }
+        tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
+    }
+    
+    public static void checkOrderedScan(BTreeTestContext testCtx) throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Testing Ordered Scan.");
+        }
+        ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(testCtx.leafFrame, false);
+        RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+        testCtx.indexAccessor.search(scanCursor, nullPred);
+        Iterator<CheckTuple> checkIter = testCtx.checkTuples.iterator();
+        int actualCount = 0;
+        try {
+            while (scanCursor.hasNext()) {
+                if (!checkIter.hasNext()) {
+                    fail("Ordered scan returned more answers than expected.\nExpected: " + testCtx.checkTuples.size());
+                }
+                scanCursor.next();
+                CheckTuple expectedTuple = checkIter.next();
+                ITupleReference tuple = scanCursor.getTuple();
+                compareActualAndExpected(tuple, expectedTuple, testCtx.fieldSerdes);
+                actualCount++;
+            }
+            if (actualCount < testCtx.checkTuples.size()) {
+                fail("Ordered scan returned fewer answers than expected.\nExpected: " + testCtx.checkTuples.size() + "\nActual  : " + actualCount);
+            }
+        } finally {
+            scanCursor.close();
+        }
+    }
+    
+    public static void checkDiskOrderScan(BTreeTestContext testCtx) throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Testing Disk-Order Scan.");
+        }
+        ITreeIndexCursor diskOrderCursor = new TreeDiskOrderScanCursor(testCtx.leafFrame);
+        testCtx.indexAccessor.diskOrderScan(diskOrderCursor);
+        int actualCount = 0;        
+        try {
+            while (diskOrderCursor.hasNext()) {
+                diskOrderCursor.next();
+                ITupleReference tuple = diskOrderCursor.getTuple();
+                CheckTuple checkTuple = createCheckTupleFromTuple(tuple, testCtx.fieldSerdes, testCtx.btree.getMultiComparator().getKeyFieldCount());
+                if (!testCtx.checkTuples.contains(checkTuple)) {
+                    fail("Disk-order scan returned unexpected answer: " + checkTuple.toString());
+                }
+                actualCount++;
+            }
+            if (actualCount < testCtx.checkTuples.size()) {
+                fail("Disk-order scan returned fewer answers than expected.\nExpected: " + testCtx.checkTuples.size() + "\nActual  : " + actualCount);
+            }
+            if (actualCount > testCtx.checkTuples.size()) {
+                fail("Disk-order scan returned more answers than expected.\nExpected: " + testCtx.checkTuples.size() + "\nActual  : " + actualCount);
+            }
+        } finally {
+            diskOrderCursor.close();
+        }
+    }
+    
+    public static void checkRangeSearch(BTreeTestContext testCtx, ITupleReference lowKey, ITupleReference highKey, boolean lowKeyInclusive, boolean highKeyInclusive) throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Testing Range Search.");
+        }
+        MultiComparator lowKeyCmp = BTreeUtils.getSearchMultiComparator(testCtx.btree.getMultiComparator(), lowKey);
+        MultiComparator highKeyCmp = BTreeUtils.getSearchMultiComparator(testCtx.btree.getMultiComparator(), highKey);
+        ITreeIndexCursor searchCursor = new BTreeRangeSearchCursor(testCtx.leafFrame, false);
+        RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, lowKeyInclusive, highKeyInclusive, lowKeyCmp, highKeyCmp);
+        testCtx.indexAccessor.search(searchCursor, rangePred);
+        // Get the subset of elements from the expected set within given key range.
+        CheckTuple lowKeyCheck = createCheckTupleFromTuple(lowKey, testCtx.fieldSerdes, lowKeyCmp.getKeyFieldCount());
+        CheckTuple highKeyCheck = createCheckTupleFromTuple(highKey, testCtx.fieldSerdes, highKeyCmp.getKeyFieldCount());
+        NavigableSet<CheckTuple> expectedSubset = null;
+        if (lowKeyCmp.getKeyFieldCount() < testCtx.btree.getMultiComparator().getKeyFieldCount() || 
+                highKeyCmp.getKeyFieldCount() < testCtx.btree.getMultiComparator().getKeyFieldCount()) {
+            // Searching on a key prefix (low key or high key or both).
+            expectedSubset = getPrefixExpectedSubset(testCtx.checkTuples, lowKeyCheck, highKeyCheck);
+        } else {
+            // Searching on all key fields.
+            expectedSubset = testCtx.checkTuples.subSet(lowKeyCheck, lowKeyInclusive, highKeyCheck, highKeyInclusive);
+        }
+        Iterator<CheckTuple> checkIter = expectedSubset.iterator();
+        int actualCount = 0;
+        try {
+            while (searchCursor.hasNext()) {
+                if (!checkIter.hasNext()) {
+                    fail("Range search returned more answers than expected.\nExpected: " + expectedSubset.size());
+                }
+                searchCursor.next();
+                CheckTuple expectedTuple = checkIter.next();
+                ITupleReference tuple = searchCursor.getTuple();
+                compareActualAndExpected(tuple, expectedTuple, testCtx.fieldSerdes);
+                actualCount++;
+            }
+            if (actualCount < expectedSubset.size()) {
+                fail("Range search returned fewer answers than expected.\nExpected: " + expectedSubset.size() + "\nActual  : " + actualCount);
+            }
+        } finally {
+            searchCursor.close();
+        }
+    }
+    
+    public static void checkPointSearches(BTreeTestContext testCtx) throws Exception {
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Testing Point Searches On All Expected Keys.");
+        }
+        ITreeIndexCursor searchCursor = new BTreeRangeSearchCursor(testCtx.leafFrame, false);
+        
+        ArrayTupleBuilder lowKeyBuilder = new ArrayTupleBuilder(testCtx.btree.getMultiComparator().getKeyFieldCount());
+        ArrayTupleReference lowKey = new ArrayTupleReference();
+        ArrayTupleBuilder highKeyBuilder = new ArrayTupleBuilder(testCtx.btree.getMultiComparator().getKeyFieldCount());
+        ArrayTupleReference highKey = new ArrayTupleReference();
+        RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, null, null);
+
+        // Iterate through expected tuples, and perform a point search in the BTree to verify the tuple can be reached.
+        for (CheckTuple checkTuple : testCtx.checkTuples) {
+            createTupleFromCheckTuple(checkTuple, lowKeyBuilder, lowKey, testCtx.fieldSerdes);
+            createTupleFromCheckTuple(checkTuple, highKeyBuilder, highKey, testCtx.fieldSerdes);
+            MultiComparator lowKeyCmp = BTreeUtils.getSearchMultiComparator(testCtx.btree.getMultiComparator(), lowKey);
+            MultiComparator highKeyCmp = BTreeUtils.getSearchMultiComparator(testCtx.btree.getMultiComparator(), highKey);
+                        
+            rangePred.setLowKey(lowKey, true);
+            rangePred.setHighKey(highKey, true);
+            rangePred.setLowKeyComparator(lowKeyCmp);
+            rangePred.setHighKeyComparator(highKeyCmp);
+            
+            testCtx.indexAccessor.search(searchCursor, rangePred);
+            
+            try {
+                // We expect exactly one answer.
+                if (searchCursor.hasNext()) {
+                    searchCursor.next();
+                    ITupleReference tuple = searchCursor.getTuple();
+                    compareActualAndExpected(tuple, checkTuple, testCtx.fieldSerdes);
+                }
+                if (searchCursor.hasNext()) {
+                    fail("Point search returned more than one answer.");
+                }
+            } finally {
+                searchCursor.close();
+            }
+        }
+    }
+    
+    @SuppressWarnings("unchecked")
+    // Create a new TreeSet containing the elements satisfying the prefix search.
+    // Implementing prefix search by changing compareTo() in CheckTuple does not work.
+    public static TreeSet<CheckTuple> getPrefixExpectedSubset(TreeSet<CheckTuple> checkTuples, CheckTuple lowKey, CheckTuple highKey) {
+        TreeSet<CheckTuple> expectedSubset = new TreeSet<CheckTuple>();
+        Iterator<CheckTuple> iter = checkTuples.iterator();
+        while(iter.hasNext()) {
+            CheckTuple t = iter.next();
+            boolean geLowKey = true;
+            boolean leHighKey = true;
+            for (int i = 0; i < lowKey.getNumKeys(); i++) {
+                if (t.get(i).compareTo(lowKey.get(i)) < 0) {
+                    geLowKey = false;
+                    break;
+                }
+            }
+            for (int i = 0; i < highKey.getNumKeys(); i++) {
+                if (t.get(i).compareTo(highKey.get(i)) > 0) {
+                    leHighKey = false;
+                    break;
+                }
+            }
+            if (geLowKey && leHighKey) {
+                expectedSubset.add(t);
+            }
+        }
+        return expectedSubset;
+    }
+    
+    public static void insertIntTuples(BTreeTestContext testCtx, int numTuples, Random rnd) throws Exception {
+        int fieldCount = testCtx.getFieldCount();
+        int numKeyFields = testCtx.getKeyFieldCount();
+        int[] tupleValues = new int[testCtx.getFieldCount()];
+        // Scale range of values according to number of keys. 
+        // For example, for 2 keys we want the square root of numTuples, for 3 keys the cube root of numTuples, etc.        
+        int maxValue = (int)Math.ceil(Math.pow(numTuples, 1.0/(double)numKeyFields));
+        for (int i = 0; i < numTuples; i++) {
+            // Set keys.
+            for (int j = 0; j < numKeyFields; j++) {
+                tupleValues[j] = rnd.nextInt() % maxValue;
+            }
+            // Set values.
+            for (int j = numKeyFields; j < fieldCount; j++) {
+                tupleValues[j] = j;
+            }
+            TupleUtils.createIntegerTuple(testCtx.tupleBuilder, testCtx.tuple, tupleValues);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
+                    LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
+                }
+            }
+            try {
+                testCtx.indexAccessor.insert(testCtx.tuple);
+                // Set expected values. Do this only after insertion succeeds because we ignore duplicate keys.
+                CheckTuple<Integer> checkTuple = new CheckTuple<Integer>(fieldCount, numKeyFields);
+                for(int v : tupleValues) {
+                    checkTuple.add(v);
+                }
+                testCtx.checkTuples.add(checkTuple);
+            } catch (BTreeDuplicateKeyException e) {
+                // Ignore duplicate key insertions.
+            }
+        }
+    }
+    
+    public static void insertStringTuples(BTreeTestContext testCtx, int numTuples, Random rnd) throws Exception {
+        int fieldCount = testCtx.getFieldCount();
+        int numKeyFields = testCtx.getKeyFieldCount();
+        Object[] tupleValues = new Object[fieldCount];
+        for (int i = 0; i < numTuples; i++) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
+                    LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
+                }
+            }
+            // Set keys.
+            for (int j = 0; j < numKeyFields; j++) {
+                int length = (Math.abs(rnd.nextInt()) % 10) + 1;
+                tupleValues[j] = getRandomString(length, rnd);
+            }
+            // Set values.
+            for (int j = numKeyFields; j < fieldCount; j++) {
+                tupleValues[j] = getRandomString(5, rnd);
+            }
+            TupleUtils.createTuple(testCtx.tupleBuilder, testCtx.tuple, testCtx.fieldSerdes, tupleValues);
+            try {
+                testCtx.indexAccessor.insert(testCtx.tuple);
+                // Set expected values. Do this only after insertion succeeds because we ignore duplicate keys.
+                CheckTuple<String> checkTuple = new CheckTuple<String>(fieldCount, numKeyFields);
+                for(Object v : tupleValues) {
+                    checkTuple.add((String)v);
+                }
+                testCtx.checkTuples.add(checkTuple);
+            } catch (BTreeDuplicateKeyException e) {
+                // Ignore duplicate key insertions.
+            }
+        }
+    }
+
+    public static void bulkLoadIntTuples(BTreeTestContext testCtx, int numTuples, Random rnd) throws Exception {
+        int fieldCount = testCtx.getFieldCount();
+        int numKeyFields = testCtx.getKeyFieldCount();
+        int[] tupleValues = new int[testCtx.getFieldCount()];
+        int maxValue = (int)Math.ceil(Math.pow(numTuples, 1.0/(double)numKeyFields));
+        for (int i = 0; i < numTuples; i++) {
+            // Set keys.
+            for (int j = 0; j < numKeyFields; j++) {
+                tupleValues[j] = rnd.nextInt() % maxValue;
+            }
+            // Set values.
+            for (int j = numKeyFields; j < fieldCount; j++) {
+                tupleValues[j] = j;
+            }
+            
+            // Set expected values. We also use these as the pre-sorted stream for bulk loading.
+            CheckTuple<Integer> checkTuple = new CheckTuple<Integer>(fieldCount, numKeyFields);
+            for(int v : tupleValues) {
+                checkTuple.add(v);
+            }            
+            testCtx.checkTuples.add(checkTuple);
+        }
+        
+        bulkLoadCheckTuples(testCtx, numTuples);
+    }
+    
+    public static void bulkLoadStringTuples(BTreeTestContext testCtx, int numTuples, Random rnd) throws Exception {
+        int fieldCount = testCtx.getFieldCount();
+        int numKeyFields = testCtx.getKeyFieldCount();
+        String[] tupleValues = new String[fieldCount];
+        for (int i = 0; i < numTuples; i++) {
+            // Set keys.
+            for (int j = 0; j < numKeyFields; j++) {
+                int length = (Math.abs(rnd.nextInt()) % 10) + 1;
+                tupleValues[j] = getRandomString(length, rnd);
+            }
+            // Set values.
+            for (int j = numKeyFields; j < fieldCount; j++) {
+                tupleValues[j] = getRandomString(5, rnd);
+            }
+            // Set expected values. We also use these as the pre-sorted stream for bulk loading.
+            CheckTuple<String> checkTuple = new CheckTuple<String>(fieldCount, numKeyFields);
+            for(String v : tupleValues) {
+                checkTuple.add(v);
+            }            
+            testCtx.checkTuples.add(checkTuple);
+        }
+        
+        bulkLoadCheckTuples(testCtx, numTuples);
+    }
+    
+    private static void bulkLoadCheckTuples(BTreeTestContext testCtx, int numTuples) throws HyracksDataException, TreeIndexException, PageAllocationException {
+        int fieldCount = testCtx.getFieldCount();
+        ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference tuple = new ArrayTupleReference();
+        // Perform bulk load.
+        IIndexBulkLoadContext bulkLoadCtx = testCtx.btree.beginBulkLoad(0.7f);
+        int c = 1;
+        for (CheckTuple checkTuple : testCtx.checkTuples) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if (c % (numTuples / Math.min(10, numTuples)) == 0) {
+                    LOGGER.info("Bulk Loading Tuple " + c + "/" + numTuples);
+                }
+            }
+            createTupleFromCheckTuple(checkTuple, tupleBuilder, tuple, testCtx.fieldSerdes);
+            testCtx.btree.bulkLoadAddTuple(tuple, bulkLoadCtx);
+            c++;
+        }
+        testCtx.btree.endBulkLoad(bulkLoadCtx);
+    }
+    
+    public static void deleteTuples(BTreeTestContext testCtx, int numTuples, Random rnd) throws Exception {
+        ArrayTupleBuilder deleteTupleBuilder = new ArrayTupleBuilder(testCtx.btree.getMultiComparator().getKeyFieldCount());
+        ArrayTupleReference deleteTuple = new ArrayTupleReference();
+        int numCheckTuples = testCtx.checkTuples.size();        
+        // Copy CheckTuple references into array, so we can randomly pick from there.
+        CheckTuple[] checkTuples = new CheckTuple[numCheckTuples];
+        int idx = 0;
+        for (CheckTuple checkTuple : testCtx.checkTuples) {
+            checkTuples[idx++] = checkTuple;
+        }
+        for (int i = 0; i < numTuples && numCheckTuples > 0; i++) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
+                    LOGGER.info("Deleting Tuple " + (i + 1) + "/" + numTuples);
+                }
+            }
+            int checkTupleIdx = Math.abs(rnd.nextInt() % numCheckTuples);
+            CheckTuple checkTuple = checkTuples[checkTupleIdx];            
+            createTupleFromCheckTuple(checkTuple, deleteTupleBuilder, deleteTuple, testCtx.fieldSerdes);          
+            testCtx.indexAccessor.delete(deleteTuple);
+            
+            // Remove check tuple from expected results.
+            testCtx.checkTuples.remove(checkTuple);
+            
+            // Swap with last "valid" CheckTuple.
+            CheckTuple tmp = checkTuples[numCheckTuples - 1];
+            checkTuples[numCheckTuples - 1] = checkTuple;
+            checkTuples[checkTupleIdx] = tmp;
+            numCheckTuples--;
+        }
+    }
+    
+    @SuppressWarnings("unchecked")
+    public static void updateTuples(BTreeTestContext testCtx, int numTuples, Random rnd) throws Exception {
+        int fieldCount = testCtx.btree.getFieldCount();
+        int keyFieldCount = testCtx.btree.getMultiComparator().getKeyFieldCount();
+        // This is a noop because we can only update non-key fields.
+        if (fieldCount == keyFieldCount) {
+            return;
+        }
+        ArrayTupleBuilder updateTupleBuilder = new ArrayTupleBuilder(fieldCount);
+        ArrayTupleReference updateTuple = new ArrayTupleReference();
+        int numCheckTuples = testCtx.checkTuples.size();
+        // Copy CheckTuple references into array, so we can randomly pick from there.
+        CheckTuple[] checkTuples = new CheckTuple[numCheckTuples];
+        int idx = 0;
+        for (CheckTuple checkTuple : testCtx.checkTuples) {
+            checkTuples[idx++] = checkTuple;
+        }
+        for (int i = 0; i < numTuples && numCheckTuples > 0; i++) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
+                    LOGGER.info("Updating Tuple " + (i + 1) + "/" + numTuples);
+                }
+            }
+            int checkTupleIdx = Math.abs(rnd.nextInt() % numCheckTuples);
+            CheckTuple checkTuple = checkTuples[checkTupleIdx];
+            // Update check tuple's non-key fields.
+            for (int j = keyFieldCount; j < fieldCount; j++) {
+                Comparable newValue = getRandomUpdateValue(testCtx.fieldSerdes[j], rnd);
+                checkTuple.set(j, newValue);
+            }
+            
+            createTupleFromCheckTuple(checkTuple, updateTupleBuilder, updateTuple, testCtx.fieldSerdes);            
+            testCtx.indexAccessor.update(updateTuple);
+            
+            // Swap with last "valid" CheckTuple.
+            CheckTuple tmp = checkTuples[numCheckTuples - 1];
+            checkTuples[numCheckTuples - 1] = checkTuple;
+            checkTuples[checkTupleIdx] = tmp;
+            numCheckTuples--;
+        }
+    }
+    
+    private static Comparable getRandomUpdateValue(ISerializerDeserializer serde, Random rnd) {
+        if (serde instanceof IntegerSerializerDeserializer) {
+            return Integer.valueOf(rnd.nextInt());
+        } else if (serde instanceof UTF8StringSerializerDeserializer) {
+            return getRandomString(10, rnd);
+        }
+        return null;
+    }
+    
+    public static String getRandomString(int length, Random rnd) {
+        String s = Long.toHexString(Double.doubleToLongBits(rnd.nextDouble()));
+        StringBuilder strBuilder = new StringBuilder();
+        for (int i = 0; i < s.length() && i < length; i++) {
+            strBuilder.append(s.charAt(Math.abs(rnd.nextInt()) % s.length()));
+        }
+        return strBuilder.toString();
+    }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/CheckTuple.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/CheckTuple.java
new file mode 100644
index 0000000..f945ab9
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/CheckTuple.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.util;
+
+@SuppressWarnings({"rawtypes", "unchecked"})
+public class CheckTuple<T extends Comparable<T>> implements Comparable<T> {
+    private final int numKeys;    
+    private final Comparable[] tuple;
+    private int pos;
+
+    public CheckTuple(int numFields, int numKeys) {
+        this.numKeys = numKeys;
+        this.tuple = new Comparable[numFields];
+        pos = 0;
+    }
+
+    public void add(T e) {
+        tuple[pos++] = e;
+    }
+
+    @Override
+    public int compareTo(T o) {
+        CheckTuple<T> other = (CheckTuple<T>)o;
+        for (int i = 0; i < numKeys; i++) {            
+            int cmp = tuple[i].compareTo(other.get(i));
+            if (cmp != 0) {
+                return cmp;
+            }
+        }
+        return 0;
+    }
+
+    public T get(int idx) {
+        return (T)tuple[idx];
+    }
+    
+    public void set(int idx, T e) {
+        tuple[idx] = e;
+    }
+    
+    public int size() {
+        return tuple.length;
+    }
+    
+    public int getNumKeys() {
+        return numKeys;
+    }
+    
+    @Override
+    public String toString() {
+        StringBuilder strBuilder = new StringBuilder();
+        for (int i = 0; i < tuple.length; i++) {
+            strBuilder.append(tuple[i].toString());
+            if (i != tuple.length-1) {
+                strBuilder.append(" ");
+            }
+        }
+        return strBuilder.toString();
+    }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/.classpath b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/.classpath
deleted file mode 100644
index f2cc5f7..0000000
--- a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/test-classes" path="src/test/java"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/.project b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/.project
deleted file mode 100644
index f60b2f9..0000000
--- a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>hyracks-storage-am-invertedindex-test</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/.settings/org.eclipse.jdt.core.prefs b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644
index 375e12e..0000000
--- a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,6 +0,0 @@
-#Fri May 20 19:34:07 PDT 2011
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
-org.eclipse.jdt.core.compiler.compliance=1.6
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.6
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/.settings/org.maven.ide.eclipse.prefs b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index 99b89a6..0000000
--- a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Thu Jan 06 11:27:16 PST 2011
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/pom.xml b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/pom.xml
index b9d63a8..50bea01 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/pom.xml
+++ b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/pom.xml
@@ -2,12 +2,12 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>edu.uci.ics.hyracks</groupId>
   <artifactId>hyracks-storage-am-invertedindex-test</artifactId>
-  <version>0.1.9-SNAPSHOT</version>
+  <version>0.2.0-SNAPSHOT</version>
 
   <parent>
     <groupId>edu.uci.ics.hyracks</groupId>
     <artifactId>hyracks-tests</artifactId>
-    <version>0.1.9-SNAPSHOT</version>
+    <version>0.2.0-SNAPSHOT</version>
   </parent>
 
   <build>
@@ -19,6 +19,7 @@
         <configuration>
           <source>1.6</source>
           <target>1.6</target>
+          <encoding>UTF-8</encoding>
         </configuration>
       </plugin>
     </plugins>
@@ -27,20 +28,20 @@
   	<dependency>
   		<groupId>edu.uci.ics.hyracks</groupId>
   		<artifactId>hyracks-control-nc</artifactId>
-  		<version>0.1.9-SNAPSHOT</version>
+  		<version>0.2.0-SNAPSHOT</version>
   		<scope>compile</scope>
   	</dependency>
   	<dependency>
   		<groupId>edu.uci.ics.hyracks</groupId>
   		<artifactId>hyracks-storage-am-invertedindex</artifactId>
-  		<version>0.1.9-SNAPSHOT</version>
+  		<version>0.2.0-SNAPSHOT</version>
   		<type>jar</type>
   		<scope>compile</scope>
   	</dependency>
   	<dependency>
   		<groupId>edu.uci.ics.hyracks</groupId>
   		<artifactId>hyracks-test-support</artifactId>
-  		<version>0.1.9-SNAPSHOT</version>
+  		<version>0.2.0-SNAPSHOT</version>
   		<type>jar</type>
   		<scope>test</scope>
   	</dependency>
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/AbstractInvIndexSearchTest.java b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/AbstractInvIndexSearchTest.java
index 444db8b..7b6bd9f 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/AbstractInvIndexSearchTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/AbstractInvIndexSearchTest.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package edu.uci.ics.hyracks.storage.am.invertedindex;
 
 import java.io.DataOutput;
@@ -10,20 +25,20 @@
 import org.junit.Before;
 
 import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.TypeTrait;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.comparators.UTF8StringBinaryComparatorFactory;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
 import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
@@ -43,6 +58,7 @@
 import edu.uci.ics.hyracks.storage.am.invertedindex.impls.TOccurrenceSearcher;
 import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizer;
 import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.ITokenFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.util.InvertedIndexUtils;
 import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
 import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
@@ -53,7 +69,7 @@
     protected final int NUM_PAGES = 100;
     protected final int MAX_OPEN_FILES = 10;
     protected final int HYRACKS_FRAME_SIZE = 32768;
-    protected IHyracksStageletContext stageletCtx = TestUtils.create(HYRACKS_FRAME_SIZE);
+    protected IHyracksTaskContext taskCtx = TestUtils.create(HYRACKS_FRAME_SIZE);
 
     protected IBufferCache bufferCache;
     protected IFileMapProvider fmp;
@@ -64,17 +80,17 @@
     protected FileReference btreeFile = new FileReference(new File(btreeFileName));
     protected int btreeFileId;
 
-    // declare btree fields
-    protected int fieldCount = 5;
-    protected ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+    // declare token type traits
+    protected ITypeTraits[] tokenTypeTraits = new ITypeTraits[] { UTF8StringPointable.TYPE_TRAITS };
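+    // The BTree stores the token plus the fixed-size inverted-list pointer fields (startPageId, endPageId, startOff, numElements).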
+    protected ITypeTraits[] btreeTypeTraits = InvertedIndexUtils.getBTreeTypeTraits(tokenTypeTraits);
 
     // declare btree keys
     protected int btreeKeyFieldCount = 1;
     protected IBinaryComparator[] btreeBinCmps = new IBinaryComparator[btreeKeyFieldCount];
-    protected MultiComparator btreeCmp = new MultiComparator(typeTraits, btreeBinCmps);
+    protected MultiComparator btreeCmp = new MultiComparator(btreeBinCmps);
 
     // btree frame factories
-    protected TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+    protected TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(btreeTypeTraits);
     protected ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
     protected ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
     protected ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
@@ -93,25 +109,25 @@
     protected int invListsFileId;
 
     protected int invListFields = 1;
-    protected ITypeTrait[] invListTypeTraits = new ITypeTrait[invListFields];
+    protected ITypeTraits[] invListTypeTraits = new ITypeTraits[invListFields];
 
     protected int invListKeys = 1;
     protected IBinaryComparator[] invListBinCmps = new IBinaryComparator[invListKeys];
-    protected MultiComparator invListCmp = new MultiComparator(invListTypeTraits, invListBinCmps);
+    protected MultiComparator invListCmp = new MultiComparator(invListBinCmps);
 
     protected InvertedIndex invIndex;
 
     protected Random rnd = new Random();
 
-    protected ByteBuffer frame = stageletCtx.allocateFrame();
-    protected FrameTupleAppender appender = new FrameTupleAppender(stageletCtx.getFrameSize());
+    protected ByteBuffer frame = taskCtx.allocateFrame();
+    protected FrameTupleAppender appender = new FrameTupleAppender(taskCtx.getFrameSize());
     protected ArrayTupleBuilder tb = new ArrayTupleBuilder(2);
     protected DataOutput dos = tb.getDataOutput();
 
     protected ISerializerDeserializer[] insertSerde = { UTF8StringSerializerDeserializer.INSTANCE,
             IntegerSerializerDeserializer.INSTANCE };
     protected RecordDescriptor insertRecDesc = new RecordDescriptor(insertSerde);
-    protected IFrameTupleAccessor accessor = new FrameTupleAccessor(stageletCtx.getFrameSize(), insertRecDesc);
+    protected IFrameTupleAccessor accessor = new FrameTupleAccessor(taskCtx.getFrameSize(), insertRecDesc);
 
     protected FrameTupleReference tuple = new FrameTupleReference();
 
@@ -125,11 +141,11 @@
     protected ISerializerDeserializer[] querySerde = { UTF8StringSerializerDeserializer.INSTANCE };
     protected RecordDescriptor queryRecDesc = new RecordDescriptor(querySerde);
 
-    protected FrameTupleAppender queryAppender = new FrameTupleAppender(stageletCtx.getFrameSize());
+    protected FrameTupleAppender queryAppender = new FrameTupleAppender(taskCtx.getFrameSize());
     protected ArrayTupleBuilder queryTb = new ArrayTupleBuilder(querySerde.length);
     protected DataOutput queryDos = queryTb.getDataOutput();
 
-    protected IFrameTupleAccessor queryAccessor = new FrameTupleAccessor(stageletCtx.getFrameSize(), queryRecDesc);
+    protected IFrameTupleAccessor queryAccessor = new FrameTupleAccessor(taskCtx.getFrameSize(), queryRecDesc);
     protected FrameTupleReference queryTuple = new FrameTupleReference();
 
     protected ITokenFactory tokenFactory;
@@ -140,13 +156,12 @@
 
     /**
      * Initialize members, generate data, and bulk load the inverted index.
-     * 
      */
     @Before
     public void start() throws Exception {
         TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
-        bufferCache = TestStorageManagerComponentHolder.getBufferCache(stageletCtx);
-        fmp = TestStorageManagerComponentHolder.getFileMapProvider(stageletCtx);
+        bufferCache = TestStorageManagerComponentHolder.getBufferCache(taskCtx);
+        fmp = TestStorageManagerComponentHolder.getFileMapProvider(taskCtx);
 
         // --- BTREE ---
 
@@ -154,23 +169,13 @@
         btreeFileId = fmp.lookupFileId(btreeFile);
         bufferCache.openFile(btreeFileId);
 
-        // token (key)
-        typeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
-        // startPageId
-        typeTraits[1] = new TypeTrait(4);
-        // endPageId
-        typeTraits[2] = new TypeTrait(4);
-        // startOff
-        typeTraits[3] = new TypeTrait(4);
-        // numElements
-        typeTraits[4] = new TypeTrait(4);
-
-        btreeBinCmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
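+        // BTree keys (tokens) are compared as UTF8 strings.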
+        btreeBinCmps[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY).createBinaryComparator();
 
         freePageManager = new LinkedListFreePageManager(bufferCache, btreeFileId, 0, metaFrameFactory);
 
-        btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, btreeCmp);
-        btree.create(btreeFileId, leafFrame, metaFrame);
+        btree = new BTree(bufferCache, btreeTypeTraits.length, btreeCmp, freePageManager, interiorFrameFactory,
+                leafFrameFactory);
+        btree.create(btreeFileId);
         btree.open(btreeFileId);
 
         // --- INVERTED INDEX ---
@@ -179,10 +184,10 @@
         invListsFileId = fmp.lookupFileId(invListsFile);
         bufferCache.openFile(invListsFileId);
 
-        invListTypeTraits[0] = new TypeTrait(4);
-        invListBinCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        invListTypeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        invListBinCmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
 
-        invIndex = new InvertedIndex(bufferCache, btree, invListCmp);
+        invIndex = new InvertedIndex(bufferCache, btree, invListTypeTraits, invListCmp);
         invIndex.open(invListsFileId);
 
         rnd.setSeed(50);
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/AbstractInvIndexTest.java b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/AbstractInvIndexTest.java
index c4bc5b9..cc8ab15 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/AbstractInvIndexTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/AbstractInvIndexTest.java
@@ -22,19 +22,22 @@
 
 public abstract class AbstractInvIndexTest {
 
-    protected static final Logger LOGGER = Logger.getLogger(AbstractInvIndexTest.class.getName());
+	protected static final Logger LOGGER = Logger
+			.getLogger(AbstractInvIndexTest.class.getName());
 
-    protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
-    protected final static String tmpDir = System.getProperty("java.io.tmpdir");
-    protected final static String sep = System.getProperty("file.separator");
-    protected final static String baseFileName = tmpDir + sep + simpleDateFormat.format(new Date());
-    protected final static String btreeFileName = baseFileName + "btree";
-    protected final static String invListsFileName = baseFileName + "invlists";
+	protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat(
+			"ddMMyy-hhmmssSS");
+	protected final static String tmpDir = System.getProperty("java.io.tmpdir");
+	protected final static String sep = System.getProperty("file.separator");
+	protected final static String baseFileName = tmpDir + sep
+			+ simpleDateFormat.format(new Date());
+	protected final static String btreeFileName = baseFileName + "btree";
+	protected final static String invListsFileName = baseFileName + "invlists";
 
-    public static void tearDown() {
-        File btreeFile = new File(btreeFileName);
-        btreeFile.deleteOnExit();
-        File invListsFile = new File(invListsFileName);
-        invListsFile.deleteOnExit();
-    }
+	public static void tearDown() {
+		File btreeFile = new File(btreeFileName);
+		btreeFile.deleteOnExit();
+		File invListsFile = new File(invListsFileName);
+		invListsFile.deleteOnExit();
+	}
 }
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/BulkLoadTest.java b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/BulkLoadTest.java
index 2393098..93934ed 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/BulkLoadTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/BulkLoadTest.java
@@ -28,38 +28,36 @@
 import org.junit.Test;
 
 import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.TypeTrait;
 import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.comparators.UTF8StringBinaryComparatorFactory;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
 import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
 import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
 import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
 import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext;
 import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
 import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
 import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
 import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
 import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListBuilder;
@@ -67,6 +65,7 @@
 import edu.uci.ics.hyracks.storage.am.invertedindex.impls.FixedSizeElementInvertedListBuilder;
 import edu.uci.ics.hyracks.storage.am.invertedindex.impls.FixedSizeElementInvertedListCursor;
 import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndex;
+import edu.uci.ics.hyracks.storage.am.invertedindex.util.InvertedIndexUtils;
 import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
 import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
@@ -78,14 +77,13 @@
     private static final int NUM_PAGES = 100;
     private static final int MAX_OPEN_FILES = 10;
     private static final int HYRACKS_FRAME_SIZE = 32768;
-    private IHyracksStageletContext stageletCtx = TestUtils.create(HYRACKS_FRAME_SIZE);
+    private IHyracksTaskContext stageletCtx = TestUtils.create(HYRACKS_FRAME_SIZE);
 
     /**
      * This test generates a list of <word-token, id> pairs which are pre-sorted
 * on the token. Those pairs form the input to an inverted-index bulk load.
      * The contents of the inverted lists are verified against the generated
      * data.
-     * 
      */
     @Test
     public void singleFieldPayloadTest() throws Exception {
@@ -105,53 +103,42 @@
         int invListsFileId = fmp.lookupFileId(invListsFile);
         bufferCache.openFile(invListsFileId);
 
-        // declare btree fields
-        int fieldCount = 5;
-        ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
-        // token (key)
-        typeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
-        // startPageId
-        typeTraits[1] = new TypeTrait(4);
-        // endPageId
-        typeTraits[2] = new TypeTrait(4);
-        // startOff
-        typeTraits[3] = new TypeTrait(4);
-        // numElements
-        typeTraits[4] = new TypeTrait(4);
+        // Declare token type traits, and compute BTree type traits.
+        ITypeTraits[] tokenTypeTraits = new ITypeTraits[] { UTF8StringPointable.TYPE_TRAITS };
+        ITypeTraits[] btreeTypeTraits = InvertedIndexUtils.getBTreeTypeTraits(tokenTypeTraits);
 
         // declare btree keys
         int keyFieldCount = 1;
         IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
-        cmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        cmps[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY).createBinaryComparator();
 
-        MultiComparator btreeCmp = new MultiComparator(typeTraits, cmps);
+        MultiComparator btreeCmp = new MultiComparator(cmps);
 
-        TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+        TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(btreeTypeTraits);
         ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
         ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
         ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
 
         ITreeIndexFrame leafFrame = leafFrameFactory.createFrame();
-        ITreeIndexFrame interiorFrame = interiorFrameFactory.createFrame();
-        ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
 
         IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, btreeFileId, 0, metaFrameFactory);
 
-        BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, btreeCmp);
-        btree.create(btreeFileId, leafFrame, metaFrame);
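+        // The BTree spans all btreeTypeTraits fields but is keyed and compared on the token field only.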
+        BTree btree = new BTree(bufferCache, btreeTypeTraits.length, btreeCmp, freePageManager, interiorFrameFactory,
+                leafFrameFactory);
+        btree.create(btreeFileId);
         btree.open(btreeFileId);
 
         int invListFields = 1;
-        ITypeTrait[] invListTypeTraits = new ITypeTrait[invListFields];
-        invListTypeTraits[0] = new TypeTrait(4);
+        ITypeTraits[] invListTypeTraits = new ITypeTraits[invListFields];
+        invListTypeTraits[0] = IntegerPointable.TYPE_TRAITS;
 
         int invListKeys = 1;
         IBinaryComparator[] invListBinCmps = new IBinaryComparator[invListKeys];
-        invListBinCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        invListBinCmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
 
-        MultiComparator invListCmp = new MultiComparator(invListTypeTraits, invListBinCmps);
+        MultiComparator invListCmp = new MultiComparator(invListBinCmps);
 
-        InvertedIndex invIndex = new InvertedIndex(bufferCache, btree, invListCmp);
+        InvertedIndex invIndex = new InvertedIndex(bufferCache, btree, invListTypeTraits, invListCmp);
         invIndex.open(invListsFileId);
 
         Random rnd = new Random();
@@ -227,7 +214,7 @@
 
         // ------- START VERIFICATION -----------
 
-        ITreeIndexCursor btreeCursor = new BTreeRangeSearchCursor((IBTreeLeafFrame) leafFrame);
+        ITreeIndexCursor btreeCursor = new BTreeRangeSearchCursor((IBTreeLeafFrame) leafFrame, false);
         FrameTupleReference searchKey = new FrameTupleReference();
         RangePredicate btreePred = new RangePredicate(true, searchKey, searchKey, true, true, btreeCmp, btreeCmp);
 
@@ -242,7 +229,7 @@
         IFrameTupleAccessor tokenAccessor = new FrameTupleAccessor(stageletCtx.getFrameSize(), tokenRecDesc);
         tokenAccessor.reset(frame);
 
-        BTreeOpContext btreeOpCtx = invIndex.getBTree().createOpContext(IndexOp.SEARCH, leafFrame, interiorFrame, null);
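+        // Accessor used to look up each token in the BTree while verifying its inverted list.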
+        ITreeIndexAccessor btreeAccessor = invIndex.getBTree().createAccessor();
 
         // verify created inverted lists one-by-one
         for (int i = 0; i < tokens.size(); i++) {
@@ -257,7 +244,7 @@
 
             searchKey.reset(tokenAccessor, 0);
 
-            invIndex.openCursor(btreeCursor, btreePred, btreeOpCtx, invListCursor);
+            invIndex.openCursor(btreeCursor, btreePred, btreeAccessor, invListCursor);
 
             invListCursor.pinPagesSync();
             int checkIndex = 0;
@@ -292,7 +279,7 @@
 
             searchKey.reset(tokenAccessor, 0);
 
-            invIndex.openCursor(btreeCursor, btreePred, btreeOpCtx, invListCursor);
+            invIndex.openCursor(btreeCursor, btreePred, btreeAccessor, invListCursor);
 
             invListCursor.pinPagesSync();
             Assert.assertEquals(invListCursor.hasNext(), false);
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/FixedSizeFrameTupleTest.java b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/FixedSizeFrameTupleTest.java
index 6e2ab22..9c7ec09 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/FixedSizeFrameTupleTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/FixedSizeFrameTupleTest.java
@@ -23,8 +23,8 @@
 
 import org.junit.Test;
 
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
-import edu.uci.ics.hyracks.api.dataflow.value.TypeTrait;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
 import edu.uci.ics.hyracks.storage.am.invertedindex.impls.FixedSizeFrameTupleAccessor;
 import edu.uci.ics.hyracks.storage.am.invertedindex.impls.FixedSizeFrameTupleAppender;
@@ -46,8 +46,8 @@
     public void singleFieldTest() throws Exception {
         ByteBuffer buffer = ByteBuffer.allocate(FRAME_SIZE);
 
-        ITypeTrait[] fields = new TypeTrait[1];
-        fields[0] = new TypeTrait(4);
+        ITypeTraits[] fields = new ITypeTraits[1];
+        fields[0] = IntegerPointable.TYPE_TRAITS;
 
         FixedSizeFrameTupleAppender ftapp = new FixedSizeFrameTupleAppender(FRAME_SIZE, fields);
         FixedSizeFrameTupleAccessor ftacc = new FixedSizeFrameTupleAccessor(FRAME_SIZE, fields);
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/NGramTokenizerTest.java b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/NGramTokenizerTest.java
index 83b1764..5f15a91 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/NGramTokenizerTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/NGramTokenizerTest.java
@@ -41,199 +41,207 @@
 
 public class NGramTokenizerTest {
 
-    private char PRECHAR = '#';
-    private char POSTCHAR = '$';
+	private char PRECHAR = '#';
+	private char POSTCHAR = '$';
 
-    private String str = "Jürgen S. Generic's Car";
-    private byte[] inputBuffer;
+	private String str = "Jürgen S. Generic's Car";
+	private byte[] inputBuffer;
 
-    private int gramLength = 3;
+	private int gramLength = 3;
 
-    private void getExpectedGrams(String s, int gramLength, ArrayList<String> grams, boolean prePost) {
+	private void getExpectedGrams(String s, int gramLength,
+			ArrayList<String> grams, boolean prePost) {
 
-        String tmp = s.toLowerCase();
-        if (prePost) {
-            StringBuilder preBuilder = new StringBuilder();
-            for (int i = 0; i < gramLength - 1; i++) {
-                preBuilder.append(PRECHAR);
-            }
-            String pre = preBuilder.toString();
+		String tmp = s.toLowerCase();
+		if (prePost) {
+			StringBuilder preBuilder = new StringBuilder();
+			for (int i = 0; i < gramLength - 1; i++) {
+				preBuilder.append(PRECHAR);
+			}
+			String pre = preBuilder.toString();
 
-            StringBuilder postBuilder = new StringBuilder();
-            for (int i = 0; i < gramLength - 1; i++) {
-                postBuilder.append(POSTCHAR);
-            }
-            String post = postBuilder.toString();
+			StringBuilder postBuilder = new StringBuilder();
+			for (int i = 0; i < gramLength - 1; i++) {
+				postBuilder.append(POSTCHAR);
+			}
+			String post = postBuilder.toString();
 
-            tmp = pre + s.toLowerCase() + post;
-        }
+			tmp = pre + s.toLowerCase() + post;
+		}
 
-        for (int i = 0; i < tmp.length() - gramLength + 1; i++) {
-            String gram = tmp.substring(i, i + gramLength);
-            grams.add(gram);
-        }
-    }
+		for (int i = 0; i < tmp.length() - gramLength + 1; i++) {
+			String gram = tmp.substring(i, i + gramLength);
+			grams.add(gram);
+		}
+	}
 
-    @Before
-    public void init() throws Exception {
-        // serialize string into bytes
-        ByteArrayOutputStream baos = new ByteArrayOutputStream();
-        DataOutput dos = new DataOutputStream(baos);
-        dos.writeUTF(str);
-        inputBuffer = baos.toByteArray();
-    }
+	@Before
+	public void init() throws Exception {
+		// serialize string into bytes
+		ByteArrayOutputStream baos = new ByteArrayOutputStream();
+		DataOutput dos = new DataOutputStream(baos);
+		dos.writeUTF(str);
+		inputBuffer = baos.toByteArray();
+	}
 
-    void runTestNGramTokenizerWithCountedHashedUTF8Tokens(boolean prePost) throws IOException {
-        HashedUTF8NGramTokenFactory tokenFactory = new HashedUTF8NGramTokenFactory();
-        NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(gramLength, prePost, false,
-                false, tokenFactory);
-        tokenizer.reset(inputBuffer, 0, inputBuffer.length);
+	void runTestNGramTokenizerWithCountedHashedUTF8Tokens(boolean prePost)
+			throws IOException {
+		HashedUTF8NGramTokenFactory tokenFactory = new HashedUTF8NGramTokenFactory();
+		NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(
+				gramLength, prePost, false, false, tokenFactory);
+		tokenizer.reset(inputBuffer, 0, inputBuffer.length);
 
-        ArrayList<String> expectedGrams = new ArrayList<String>();
-        getExpectedGrams(str, gramLength, expectedGrams, prePost);
-        ArrayList<Integer> expectedHashedGrams = new ArrayList<Integer>();
-        HashMap<String, Integer> gramCounts = new HashMap<String, Integer>();
-        for (String s : expectedGrams) {
-            Integer count = gramCounts.get(s);
-            if (count == null) {
-                count = 1;
-                gramCounts.put(s, count);
-            } else {
-                count++;
-            }
+		ArrayList<String> expectedGrams = new ArrayList<String>();
+		getExpectedGrams(str, gramLength, expectedGrams, prePost);
+		ArrayList<Integer> expectedHashedGrams = new ArrayList<Integer>();
+		HashMap<String, Integer> gramCounts = new HashMap<String, Integer>();
+		for (String s : expectedGrams) {
+			Integer count = gramCounts.get(s);
+			if (count == null) {
+				count = 1;
+				gramCounts.put(s, count);
+			} else {
+				count++;
+			}
 
-            int hash = tokenHash(s, count);
-            expectedHashedGrams.add(hash);
-        }
+			int hash = tokenHash(s, count);
+			expectedHashedGrams.add(hash);
+		}
 
-        int tokenCount = 0;
+		int tokenCount = 0;
 
-        while (tokenizer.hasNext()) {
-            tokenizer.next();
+		while (tokenizer.hasNext()) {
+			tokenizer.next();
 
-            // serialize hashed token
-            ByteArrayOutputStream tokenBaos = new ByteArrayOutputStream();
-            DataOutput tokenDos = new DataOutputStream(tokenBaos);
+			// serialize hashed token
+			ByteArrayOutputStream tokenBaos = new ByteArrayOutputStream();
+			DataOutput tokenDos = new DataOutputStream(tokenBaos);
 
-            IToken token = tokenizer.getToken();
-            token.serializeToken(tokenDos);
+			IToken token = tokenizer.getToken();
+			token.serializeToken(tokenDos);
 
-            // deserialize token
-            ByteArrayInputStream bais = new ByteArrayInputStream(tokenBaos.toByteArray());
-            DataInput in = new DataInputStream(bais);
+			// deserialize token
+			ByteArrayInputStream bais = new ByteArrayInputStream(
+					tokenBaos.toByteArray());
+			DataInput in = new DataInputStream(bais);
 
-            Integer hashedGram = in.readInt();
+			Integer hashedGram = in.readInt();
 
-            // System.out.println(hashedGram);
+			// System.out.println(hashedGram);
 
-            Assert.assertEquals(expectedHashedGrams.get(tokenCount), hashedGram);
+			Assert.assertEquals(expectedHashedGrams.get(tokenCount), hashedGram);
 
-            tokenCount++;
-        }
-        // System.out.println("---------");
-    }
+			tokenCount++;
+		}
+		// System.out.println("---------");
+	}
 
-    void runTestNGramTokenizerWithHashedUTF8Tokens(boolean prePost) throws IOException {
-        HashedUTF8NGramTokenFactory tokenFactory = new HashedUTF8NGramTokenFactory();
-        NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(gramLength, prePost, true, false,
-                tokenFactory);
-        tokenizer.reset(inputBuffer, 0, inputBuffer.length);
+	void runTestNGramTokenizerWithHashedUTF8Tokens(boolean prePost)
+			throws IOException {
+		HashedUTF8NGramTokenFactory tokenFactory = new HashedUTF8NGramTokenFactory();
+		NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(
+				gramLength, prePost, true, false, tokenFactory);
+		tokenizer.reset(inputBuffer, 0, inputBuffer.length);
 
-        ArrayList<String> expectedGrams = new ArrayList<String>();
-        getExpectedGrams(str, gramLength, expectedGrams, prePost);
-        ArrayList<Integer> expectedHashedGrams = new ArrayList<Integer>();
-        for (String s : expectedGrams) {
-            int hash = tokenHash(s, 1);
-            expectedHashedGrams.add(hash);
-        }
+		ArrayList<String> expectedGrams = new ArrayList<String>();
+		getExpectedGrams(str, gramLength, expectedGrams, prePost);
+		ArrayList<Integer> expectedHashedGrams = new ArrayList<Integer>();
+		for (String s : expectedGrams) {
+			int hash = tokenHash(s, 1);
+			expectedHashedGrams.add(hash);
+		}
 
-        int tokenCount = 0;
+		int tokenCount = 0;
 
-        while (tokenizer.hasNext()) {
-            tokenizer.next();
+		while (tokenizer.hasNext()) {
+			tokenizer.next();
 
-            // serialize hashed token
-            ByteArrayOutputStream tokenBaos = new ByteArrayOutputStream();
-            DataOutput tokenDos = new DataOutputStream(tokenBaos);
+			// serialize hashed token
+			ByteArrayOutputStream tokenBaos = new ByteArrayOutputStream();
+			DataOutput tokenDos = new DataOutputStream(tokenBaos);
 
-            IToken token = tokenizer.getToken();
-            token.serializeToken(tokenDos);
+			IToken token = tokenizer.getToken();
+			token.serializeToken(tokenDos);
 
-            // deserialize token
-            ByteArrayInputStream bais = new ByteArrayInputStream(tokenBaos.toByteArray());
-            DataInput in = new DataInputStream(bais);
+			// deserialize token
+			ByteArrayInputStream bais = new ByteArrayInputStream(
+					tokenBaos.toByteArray());
+			DataInput in = new DataInputStream(bais);
 
-            Integer hashedGram = in.readInt();
+			Integer hashedGram = in.readInt();
 
-            // System.out.println(hashedGram);
+			// System.out.println(hashedGram);
 
-            Assert.assertEquals(expectedHashedGrams.get(tokenCount), hashedGram);
+			Assert.assertEquals(expectedHashedGrams.get(tokenCount), hashedGram);
 
-            tokenCount++;
-        }
-        // System.out.println("---------");
-    }
+			tokenCount++;
+		}
+		// System.out.println("---------");
+	}
 
-    void runTestNGramTokenizerWithUTF8Tokens(boolean prePost) throws IOException {
-        UTF8NGramTokenFactory tokenFactory = new UTF8NGramTokenFactory();
-        NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(gramLength, prePost, true, false,
-                tokenFactory);
-        tokenizer.reset(inputBuffer, 0, inputBuffer.length);
+	void runTestNGramTokenizerWithUTF8Tokens(boolean prePost)
+			throws IOException {
+		UTF8NGramTokenFactory tokenFactory = new UTF8NGramTokenFactory();
+		NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(
+				gramLength, prePost, true, false, tokenFactory);
+		tokenizer.reset(inputBuffer, 0, inputBuffer.length);
 
-        ArrayList<String> expectedGrams = new ArrayList<String>();
-        getExpectedGrams(str, gramLength, expectedGrams, prePost);
+		ArrayList<String> expectedGrams = new ArrayList<String>();
+		getExpectedGrams(str, gramLength, expectedGrams, prePost);
 
-        int tokenCount = 0;
+		int tokenCount = 0;
 
-        while (tokenizer.hasNext()) {
-            tokenizer.next();
+		while (tokenizer.hasNext()) {
+			tokenizer.next();
 
-            // serialize hashed token
-            ByteArrayOutputStream tokenBaos = new ByteArrayOutputStream();
-            DataOutput tokenDos = new DataOutputStream(tokenBaos);
+			// serialize hashed token
+			ByteArrayOutputStream tokenBaos = new ByteArrayOutputStream();
+			DataOutput tokenDos = new DataOutputStream(tokenBaos);
 
-            IToken token = tokenizer.getToken();
-            token.serializeToken(tokenDos);
+			IToken token = tokenizer.getToken();
+			token.serializeToken(tokenDos);
 
-            // deserialize token
-            ByteArrayInputStream bais = new ByteArrayInputStream(tokenBaos.toByteArray());
-            DataInput in = new DataInputStream(bais);
+			// deserialize token
+			ByteArrayInputStream bais = new ByteArrayInputStream(
+					tokenBaos.toByteArray());
+			DataInput in = new DataInputStream(bais);
 
-            String strGram = in.readUTF();
+			String strGram = in.readUTF();
 
-            // System.out.println("\"" + strGram + "\"");
+			// System.out.println("\"" + strGram + "\"");
 
-            Assert.assertEquals(expectedGrams.get(tokenCount), strGram);
+			Assert.assertEquals(expectedGrams.get(tokenCount), strGram);
 
-            tokenCount++;
-        }
-        // System.out.println("---------");
-    }
+			tokenCount++;
+		}
+		// System.out.println("---------");
+	}
 
-    @Test
-    public void testNGramTokenizerWithCountedHashedUTF8Tokens() throws Exception {
-        runTestNGramTokenizerWithCountedHashedUTF8Tokens(false);
-        runTestNGramTokenizerWithCountedHashedUTF8Tokens(true);
-    }
+	@Test
+	public void testNGramTokenizerWithCountedHashedUTF8Tokens()
+			throws Exception {
+		runTestNGramTokenizerWithCountedHashedUTF8Tokens(false);
+		runTestNGramTokenizerWithCountedHashedUTF8Tokens(true);
+	}
 
-    @Test
-    public void testNGramTokenizerWithHashedUTF8Tokens() throws Exception {
-        runTestNGramTokenizerWithHashedUTF8Tokens(false);
-        runTestNGramTokenizerWithHashedUTF8Tokens(true);
-    }
+	@Test
+	public void testNGramTokenizerWithHashedUTF8Tokens() throws Exception {
+		runTestNGramTokenizerWithHashedUTF8Tokens(false);
+		runTestNGramTokenizerWithHashedUTF8Tokens(true);
+	}
 
-    @Test
-    public void testNGramTokenizerWithUTF8Tokens() throws IOException {
-        runTestNGramTokenizerWithUTF8Tokens(false);
-        runTestNGramTokenizerWithUTF8Tokens(true);
-    }
+	@Test
+	public void testNGramTokenizerWithUTF8Tokens() throws IOException {
+		runTestNGramTokenizerWithUTF8Tokens(false);
+		runTestNGramTokenizerWithUTF8Tokens(true);
+	}
 
-    public int tokenHash(String token, int tokenCount) {
-        int h = AbstractUTF8Token.GOLDEN_RATIO_32;
-        for (int i = 0; i < token.length(); i++) {
-            h ^= token.charAt(i);
-            h *= AbstractUTF8Token.GOLDEN_RATIO_32;
-        }
-        return h + tokenCount;
-    }
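+	// Reference hash for expected values: golden-ratio multiplicative hashing over the token's characters, plus the occurrence count.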
+	public int tokenHash(String token, int tokenCount) {
+		int h = AbstractUTF8Token.GOLDEN_RATIO_32;
+		for (int i = 0; i < token.length(); i++) {
+			h ^= token.charAt(i);
+			h *= AbstractUTF8Token.GOLDEN_RATIO_32;
+		}
+		return h + tokenCount;
+	}
 }
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchPerfTest.java b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchPerfTest.java
index 00533d0..1299fcb 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchPerfTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchPerfTest.java
@@ -17,6 +17,7 @@
 
 import java.util.ArrayList;
 import java.util.List;
+import java.util.logging.Level;
 
 import org.junit.Assert;
 import org.junit.Before;
@@ -27,6 +28,8 @@
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
 import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.common.api.PageAllocationException;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
 import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifier;
 import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListBuilder;
 import edu.uci.ics.hyracks.storage.am.invertedindex.impls.FixedSizeElementInvertedListBuilder;
@@ -50,231 +53,262 @@
  */
 public class SearchPerfTest extends AbstractInvIndexSearchTest {
 
-    protected List<String> tokens = new ArrayList<String>();
+	protected List<String> tokens = new ArrayList<String>();
 
-    @Before
-    public void start() throws Exception {
-        super.start();
-        tokenFactory = new UTF8WordTokenFactory();
-        tokenizer = new DelimitedUTF8StringBinaryTokenizer(true, false, tokenFactory);
-        searcher = new TOccurrenceSearcher(stageletCtx, invIndex, tokenizer);
-        resultCursor = new SearchResultCursor(searcher.createResultFrameTupleAccessor(),
-                searcher.createResultTupleReference());
-        loadData();
-    }
+	@Before
+	public void start() throws Exception {
+		super.start();
+		tokenFactory = new UTF8WordTokenFactory();
+		tokenizer = new DelimitedUTF8StringBinaryTokenizer(true, false,
+				tokenFactory);
+		searcher = new TOccurrenceSearcher(taskCtx, invIndex, tokenizer);
+		resultCursor = new SearchResultCursor(
+				searcher.createResultFrameTupleAccessor(),
+				searcher.createResultTupleReference());
+		loadData();
+	}
 
-    public void loadData() throws HyracksDataException {
-        tokens.add("compilers");
-        tokens.add("computer");
-        tokens.add("databases");
-        tokens.add("fast");
-        tokens.add("hyracks");
-        tokens.add("major");
-        tokens.add("science");
-        tokens.add("systems");
-        tokens.add("university");
+	public void loadData() throws HyracksDataException, TreeIndexException, PageAllocationException {
+		tokens.add("compilers");
+		tokens.add("computer");
+		tokens.add("databases");
+		tokens.add("fast");
+		tokens.add("hyracks");
+		tokens.add("major");
+		tokens.add("science");
+		tokens.add("systems");
+		tokens.add("university");
 
-        for (int i = 0; i < tokens.size(); i++) {
-            checkInvLists.add(new ArrayList<Integer>());
-        }
+		for (int i = 0; i < tokens.size(); i++) {
+			checkInvLists.add(new ArrayList<Integer>());
+		}
 
-        // for generating length-skewed inverted lists
-        int addProb = 0;
-        int addProbStep = 10;
+		// for generating length-skewed inverted lists
+		int addProb = 0;
+		int addProbStep = 10;
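+		// addProb grows with every token, so earlier tokens receive longer inverted lists than later ones.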
 
-        IInvertedListBuilder invListBuilder = new FixedSizeElementInvertedListBuilder(invListTypeTraits);
-        InvertedIndex.BulkLoadContext ctx = invIndex.beginBulkLoad(invListBuilder, HYRACKS_FRAME_SIZE,
-                BTree.DEFAULT_FILL_FACTOR);
+		IInvertedListBuilder invListBuilder = new FixedSizeElementInvertedListBuilder(
+				invListTypeTraits);
+		InvertedIndex.BulkLoadContext ctx = invIndex.beginBulkLoad(
+				invListBuilder, HYRACKS_FRAME_SIZE, BTree.DEFAULT_FILL_FACTOR);
 
-        int totalElements = 0;
-        for (int i = 0; i < tokens.size(); i++) {
+		int totalElements = 0;
+		for (int i = 0; i < tokens.size(); i++) {
 
-            addProb += addProbStep * (i + 1);
-            for (int j = 0; j < maxId; j++) {
-                if ((Math.abs(rnd.nextInt()) % addProb) == 0) {
+			addProb += addProbStep * (i + 1);
+			for (int j = 0; j < maxId; j++) {
+				if ((Math.abs(rnd.nextInt()) % addProb) == 0) {
 
-                    totalElements++;
+					totalElements++;
 
-                    tb.reset();
-                    UTF8StringSerializerDeserializer.INSTANCE.serialize(tokens.get(i), dos);
-                    tb.addFieldEndOffset();
-                    IntegerSerializerDeserializer.INSTANCE.serialize(j, dos);
-                    tb.addFieldEndOffset();
+					tb.reset();
+					UTF8StringSerializerDeserializer.INSTANCE.serialize(
+							tokens.get(i), dos);
+					tb.addFieldEndOffset();
+					IntegerSerializerDeserializer.INSTANCE.serialize(j, dos);
+					tb.addFieldEndOffset();
 
-                    checkInvLists.get(i).add(j);
+					checkInvLists.get(i).add(j);
 
-                    appender.reset(frame, true);
-                    appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+					appender.reset(frame, true);
+					appender.append(tb.getFieldEndOffsets(), tb.getByteArray(),
+							0, tb.getSize());
 
-                    tuple.reset(accessor, 0);
+					tuple.reset(accessor, 0);
 
-                    try {
-                        invIndex.bulkLoadAddTuple(ctx, tuple);
-                    } catch (Exception e) {
-                        e.printStackTrace();
-                    }
-                }
-            }
-        }
-        invIndex.endBulkLoad(ctx);
-    }
+					try {
+						invIndex.bulkLoadAddTuple(ctx, tuple);
+					} catch (Exception e) {
+						e.printStackTrace();
+					}
+				}
+			}
+		}
+		invIndex.endBulkLoad(ctx);
+	}
 
-    /**
-     * Determine the expected results with the ScanCount algorithm. The
-     * ScanCount algorithm is very simple, so we can be confident the results
-     * are correct.
-     * 
-     */
-    protected void fillExpectedResults(int[] queryTokenIndexes, int numQueryTokens, int occurrenceThreshold) {
-        // reset scan count array
-        for (int i = 0; i < maxId; i++) {
-            scanCountArray[i] = 0;
-        }
+	/**
+	 * Determine the expected results with the ScanCount algorithm. The
+	 * ScanCount algorithm is very simple, so we can be confident the results
+	 * are correct.
+	 * 
+	 */
+	protected void fillExpectedResults(int[] queryTokenIndexes,
+			int numQueryTokens, int occurrenceThreshold) {
+		// reset scan count array
+		for (int i = 0; i < maxId; i++) {
+			scanCountArray[i] = 0;
+		}
 
-        // count occurrences
-        for (int i = 0; i < numQueryTokens; i++) {
-            ArrayList<Integer> list = checkInvLists.get(queryTokenIndexes[i]);
-            for (int j = 0; j < list.size(); j++) {
-                scanCountArray[list.get(j)]++;
-            }
-        }
+		// count occurrences
+		for (int i = 0; i < numQueryTokens; i++) {
+			ArrayList<Integer> list = checkInvLists.get(queryTokenIndexes[i]);
+			for (int j = 0; j < list.size(); j++) {
+				scanCountArray[list.get(j)]++;
+			}
+		}
 
-        // check threshold
-        expectedResults.clear();
-        for (int i = 0; i < maxId; i++) {
-            if (scanCountArray[i] >= occurrenceThreshold) {
-                expectedResults.add(i);
-            }
-        }
-    }
+		// check threshold
+		expectedResults.clear();
+		for (int i = 0; i < maxId; i++) {
+			if (scanCountArray[i] >= occurrenceThreshold) {
+				expectedResults.add(i);
+			}
+		}
+	}
 
-    /**
-     * Generates a specified number of queries. Each query consists of a set of
-     * randomly chosen tokens that are picked from the pre-defined set of
-     * tokens. We run each query, measure it's time, and verify it's results
-     * against the results produced by ScanCount, implemented in
-     * fillExpectedResults().
-     * 
-     */
-    private void runQueries(IInvertedIndexSearchModifier searchModifier, int numQueries) throws Exception {
+	/**
+	 * Generates a specified number of queries. Each query consists of a set of
+	 * randomly chosen tokens that are picked from the pre-defined set of
+	 * tokens. We run each query, measure its time, and verify its results
+	 * against the results produced by ScanCount, implemented in
+	 * fillExpectedResults().
+	 * 
+	 */
+	private void runQueries(IInvertedIndexSearchModifier searchModifier,
+			int numQueries) throws Exception {
 
-        rnd.setSeed(50);
+		rnd.setSeed(50);
 
-        // generate random queries
-        int[] queryTokenIndexes = new int[tokens.size()];
-        for (int i = 0; i < numQueries; i++) {
+		// generate random queries
+		int[] queryTokenIndexes = new int[tokens.size()];
+		for (int i = 0; i < numQueries; i++) {
 
-            int numQueryTokens = Math.abs(rnd.nextInt() % tokens.size()) + 1;
-            for (int j = 0; j < numQueryTokens; j++) {
-                queryTokenIndexes[j] = Math.abs(rnd.nextInt() % tokens.size());
-            }
+			int numQueryTokens = Math.abs(rnd.nextInt() % tokens.size()) + 1;
+			for (int j = 0; j < numQueryTokens; j++) {
+				queryTokenIndexes[j] = Math.abs(rnd.nextInt() % tokens.size());
+			}
 
-            StringBuilder strBuilder = new StringBuilder();
-            for (int j = 0; j < numQueryTokens; j++) {
-                strBuilder.append(tokens.get(queryTokenIndexes[j]));
-                if (j + 1 != numQueryTokens)
-                    strBuilder.append(" ");
-            }
+			StringBuilder strBuilder = new StringBuilder();
+			for (int j = 0; j < numQueryTokens; j++) {
+				strBuilder.append(tokens.get(queryTokenIndexes[j]));
+				if (j + 1 != numQueryTokens)
+					strBuilder.append(" ");
+			}
 
-            String queryString = strBuilder.toString();
+			String queryString = strBuilder.toString();
 
-            queryTb.reset();
-            UTF8StringSerializerDeserializer.INSTANCE.serialize(queryString, queryDos);
-            queryTb.addFieldEndOffset();
+			queryTb.reset();
+			UTF8StringSerializerDeserializer.INSTANCE.serialize(queryString,
+					queryDos);
+			queryTb.addFieldEndOffset();
 
-            queryAppender.reset(frame, true);
-            queryAppender.append(queryTb.getFieldEndOffsets(), queryTb.getByteArray(), 0, queryTb.getSize());
-            queryTuple.reset(queryAccessor, 0);
+			queryAppender.reset(frame, true);
+			queryAppender.append(queryTb.getFieldEndOffsets(),
+					queryTb.getByteArray(), 0, queryTb.getSize());
+			queryTuple.reset(queryAccessor, 0);
 
-            boolean panic = false;
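+			// Set when the searcher throws an OccurrenceThresholdPanicException; such queries are not verified against ScanCount.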
+			boolean panic = false;
 
-            int repeats = 1;
-            double totalTime = 0;
-            for (int j = 0; j < repeats; j++) {
-                long timeStart = System.currentTimeMillis();
-                try {
-                    searcher.reset();
-                    searcher.search(resultCursor, queryTuple, 0, searchModifier);
-                } catch (OccurrenceThresholdPanicException e) {
-                    panic = true;
-                }
-                long timeEnd = System.currentTimeMillis();
-                totalTime += timeEnd - timeStart;
-            }
-            double avgTime = totalTime / (double) repeats;
-            LOGGER.info(i + ": " + "\"" + queryString + "\": " + avgTime + "ms");
+			int repeats = 1;
+			double totalTime = 0;
+			for (int j = 0; j < repeats; j++) {
+				long timeStart = System.currentTimeMillis();
+				try {
+					searcher.reset();
+					searcher.search(resultCursor, queryTuple, 0, searchModifier);
+				} catch (OccurrenceThresholdPanicException e) {
+					panic = true;
+				}
+				long timeEnd = System.currentTimeMillis();
+				totalTime += timeEnd - timeStart;
+			}
+			double avgTime = totalTime / (double) repeats;
+			if (LOGGER.isLoggable(Level.INFO)) {
+				LOGGER.info(i + ": " + "\"" + queryString + "\": " + avgTime
+						+ "ms");
+			}
 
-            if (!panic) {
+			if (!panic) {
 
-                fillExpectedResults(queryTokenIndexes, numQueryTokens, searcher.getOccurrenceThreshold());
+				fillExpectedResults(queryTokenIndexes, numQueryTokens,
+						searcher.getOccurrenceThreshold());
 
-                // verify results
-                int checkIndex = 0;
-                while (resultCursor.hasNext()) {
-                    resultCursor.next();
-                    ITupleReference resultTuple = resultCursor.getTuple();
-                    int id = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
-                            resultTuple.getFieldStart(0));
-                    Assert.assertEquals(expectedResults.get(checkIndex).intValue(), id);
-                    checkIndex++;
-                }
+				// verify results
+				int checkIndex = 0;
+				while (resultCursor.hasNext()) {
+					resultCursor.next();
+					ITupleReference resultTuple = resultCursor.getTuple();
+					int id = IntegerSerializerDeserializer.getInt(
+							resultTuple.getFieldData(0),
+							resultTuple.getFieldStart(0));
+					Assert.assertEquals(expectedResults.get(checkIndex)
+							.intValue(), id);
+					checkIndex++;
+				}
 
-                if (expectedResults.size() != checkIndex) {
-                    LOGGER.info("CHECKING");
-                    StringBuilder expectedStrBuilder = new StringBuilder();
-                    for (Integer x : expectedResults) {
-                        expectedStrBuilder.append(x + " ");
-                    }
-                    LOGGER.info(expectedStrBuilder.toString());
-                }
+				if (expectedResults.size() != checkIndex) {
+					if (LOGGER.isLoggable(Level.INFO)) {
+						LOGGER.info("CHECKING");
+					}
+					StringBuilder expectedStrBuilder = new StringBuilder();
+					for (Integer x : expectedResults) {
+						expectedStrBuilder.append(x + " ");
+					}
+					if (LOGGER.isLoggable(Level.INFO)) {
+						LOGGER.info(expectedStrBuilder.toString());
+					}
+				}
 
-                Assert.assertEquals(expectedResults.size(), checkIndex);
-            }
-        }
-    }
+				Assert.assertEquals(expectedResults.size(), checkIndex);
+			}
+		}
+	}
 
-    /**
-     * Runs 50 random conjunctive search queries to test the
-     * ConjunctiveSearchModifier.
-     * 
-     */
-    @Test
-    public void conjunctiveKeywordQueryTest() throws Exception {
-        IInvertedIndexSearchModifier searchModifier = new ConjunctiveSearchModifier();
-        runQueries(searchModifier, 50);
-    }
+	/**
+	 * Runs 50 random conjunctive search queries to test the
+	 * ConjunctiveSearchModifier.
+	 * 
+	 */
+	@Test
+	public void conjunctiveKeywordQueryTest() throws Exception {
+		IInvertedIndexSearchModifier searchModifier = new ConjunctiveSearchModifier();
+		runQueries(searchModifier, 50);
+	}
 
-    /**
-     * Runs 50 random jaccard-based search queries with thresholds 1.0, 0.9,
-     * 0.8, 0.7, 0.6, 0.5. Tests the JaccardSearchModifier.
-     * 
-     */
-    @Test
-    public void jaccardKeywordQueryTest() throws Exception {
-        JaccardSearchModifier searchModifier = new JaccardSearchModifier(1.0f);
+	/**
+	 * Runs 50 random jaccard-based search queries with thresholds 1.0, 0.9,
+	 * 0.8, 0.7, 0.6, 0.5. Tests the JaccardSearchModifier.
+	 * 
+	 */
+	@Test
+	public void jaccardKeywordQueryTest() throws Exception {
+		JaccardSearchModifier searchModifier = new JaccardSearchModifier(1.0f);
 
-        LOGGER.info("JACCARD: " + 1.0f);
-        searchModifier.setJaccThresh(1.0f);
-        runQueries(searchModifier, 50);
+		if (LOGGER.isLoggable(Level.INFO)) {
+			LOGGER.info("JACCARD: " + 1.0f);
+		}
+		searchModifier.setJaccThresh(1.0f);
+		runQueries(searchModifier, 50);
 
-        LOGGER.info("JACCARD: " + 0.9f);
-        searchModifier.setJaccThresh(0.9f);
-        runQueries(searchModifier, 50);
+		if (LOGGER.isLoggable(Level.INFO)) {
+			LOGGER.info("JACCARD: " + 0.9f);
+		}
+		searchModifier.setJaccThresh(0.9f);
+		runQueries(searchModifier, 50);
 
-        LOGGER.info("JACCARD: " + 0.8f);
-        searchModifier.setJaccThresh(0.8f);
-        runQueries(searchModifier, 50);
+		if (LOGGER.isLoggable(Level.INFO)) {
+			LOGGER.info("JACCARD: " + 0.8f);
+		}
+		searchModifier.setJaccThresh(0.8f);
+		runQueries(searchModifier, 50);
 
-        LOGGER.info("JACCARD: " + 0.7f);
-        searchModifier.setJaccThresh(0.7f);
-        runQueries(searchModifier, 50);
+		if (LOGGER.isLoggable(Level.INFO)) {
+			LOGGER.info("JACCARD: " + 0.7f);
+		}
+		searchModifier.setJaccThresh(0.7f);
+		runQueries(searchModifier, 50);
 
-        LOGGER.info("JACCARD: " + 0.6f);
-        searchModifier.setJaccThresh(0.6f);
-        runQueries(searchModifier, 50);
+		if (LOGGER.isLoggable(Level.INFO)) {
+			LOGGER.info("JACCARD: " + 0.6f);
+		}
+		searchModifier.setJaccThresh(0.6f);
+		runQueries(searchModifier, 50);
 
-        LOGGER.info("JACCARD: " + 0.5f);
-        searchModifier.setJaccThresh(0.5f);
-        runQueries(searchModifier, 50);
-    }
+		if (LOGGER.isLoggable(Level.INFO)) {
+			LOGGER.info("JACCARD: " + 0.5f);
+		}
+		searchModifier.setJaccThresh(0.5f);
+		runQueries(searchModifier, 50);
+	}
 }
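Note: jaccardKeywordQueryTest above sweeps the Jaccard threshold from 1.0 down to 0.5 over 3-gram tokens. As a rough, self-contained illustration of what that threshold decides — plain Java, not the Hyracks JaccardSearchModifier; the class name and gram lists are made up for the example and the real tokenizer may emit different grams:

    import java.util.HashSet;
    import java.util.Set;

    // Hypothetical helper: shows the set-similarity test that a Jaccard threshold implies.
    public class JaccardSketch {
        // Jaccard similarity = |A intersect B| / |A union B|
        static double jaccard(Set<String> a, Set<String> b) {
            Set<String> inter = new HashSet<String>(a);
            inter.retainAll(b);
            Set<String> union = new HashSet<String>(a);
            union.addAll(b);
            return union.isEmpty() ? 1.0 : (double) inter.size() / union.size();
        }

        public static void main(String[] args) {
            // Illustrative 3-gram sets for "Kathrin" vs. "Cathrin".
            Set<String> query = new HashSet<String>();
            Set<String> candidate = new HashSet<String>();
            for (String g : new String[] { "Kat", "ath", "thr", "hri", "rin" }) query.add(g);
            for (String g : new String[] { "Cat", "ath", "thr", "hri", "rin" }) candidate.add(g);
            double sim = jaccard(query, candidate); // 4 shared grams / 6 distinct grams = 0.66...
            // Only the lower thresholds in the sweep accept this near-duplicate pair.
            for (float thresh : new float[] { 1.0f, 0.9f, 0.8f, 0.7f, 0.6f, 0.5f }) {
                System.out.println("thresh=" + thresh + " -> match=" + (sim >= thresh));
            }
        }
    }
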
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchTest.java b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchTest.java
index 9161e5a..eb2b39c 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchTest.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package edu.uci.ics.hyracks.storage.am.invertedindex;
 
 import java.io.DataOutputStream;
@@ -5,6 +20,7 @@
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.logging.Level;
 
 import org.junit.Before;
 import org.junit.Test;
@@ -14,6 +30,8 @@
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
 import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.common.api.PageAllocationException;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
 import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifier;
 import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListBuilder;
 import edu.uci.ics.hyracks.storage.am.invertedindex.impls.FixedSizeElementInvertedListBuilder;
@@ -30,235 +48,261 @@
 
 public class SearchTest extends AbstractInvIndexSearchTest {
 
-    protected List<String> dataStrings = new ArrayList<String>();
-    protected List<String> firstNames = new ArrayList<String>();
-    protected List<String> lastNames = new ArrayList<String>();
+	protected List<String> dataStrings = new ArrayList<String>();
+	protected List<String> firstNames = new ArrayList<String>();
+	protected List<String> lastNames = new ArrayList<String>();
 
-    @Before
-    public void start() throws Exception {
-        super.start();
-        tokenFactory = new UTF8NGramTokenFactory();
-        tokenizer = new NGramUTF8StringBinaryTokenizer(3, false, true, false, tokenFactory);
-        searcher = new TOccurrenceSearcher(stageletCtx, invIndex, tokenizer);
-        resultCursor = new SearchResultCursor(searcher.createResultFrameTupleAccessor(),
-                searcher.createResultTupleReference());
-        generateDataStrings();
-        loadData();
-    }
+	@Before
+	public void start() throws Exception {
+		super.start();
+		tokenFactory = new UTF8NGramTokenFactory();
+		tokenizer = new NGramUTF8StringBinaryTokenizer(3, false, true, false,
+				tokenFactory);
+		searcher = new TOccurrenceSearcher(taskCtx, invIndex, tokenizer);
+		resultCursor = new SearchResultCursor(
+				searcher.createResultFrameTupleAccessor(),
+				searcher.createResultTupleReference());
+		generateDataStrings();
+		loadData();
+	}
 
-    public void generateDataStrings() {
-        firstNames.add("Kathrin");
-        firstNames.add("Cathrin");
-        firstNames.add("Kathryn");
-        firstNames.add("Cathryn");
-        firstNames.add("Kathrine");
-        firstNames.add("Cathrine");
-        firstNames.add("Kathryne");
-        firstNames.add("Cathryne");
-        firstNames.add("Katherin");
-        firstNames.add("Catherin");
-        firstNames.add("Katheryn");
-        firstNames.add("Catheryn");
-        firstNames.add("Katherine");
-        firstNames.add("Catherine");
-        firstNames.add("Katheryne");
-        firstNames.add("Catheryne");
-        firstNames.add("John");
-        firstNames.add("Jack");
-        firstNames.add("Jonathan");
-        firstNames.add("Nathan");
+	public void generateDataStrings() {
+		firstNames.add("Kathrin");
+		firstNames.add("Cathrin");
+		firstNames.add("Kathryn");
+		firstNames.add("Cathryn");
+		firstNames.add("Kathrine");
+		firstNames.add("Cathrine");
+		firstNames.add("Kathryne");
+		firstNames.add("Cathryne");
+		firstNames.add("Katherin");
+		firstNames.add("Catherin");
+		firstNames.add("Katheryn");
+		firstNames.add("Catheryn");
+		firstNames.add("Katherine");
+		firstNames.add("Catherine");
+		firstNames.add("Katheryne");
+		firstNames.add("Catheryne");
+		firstNames.add("John");
+		firstNames.add("Jack");
+		firstNames.add("Jonathan");
+		firstNames.add("Nathan");
 
-        lastNames.add("Miller");
-        lastNames.add("Myller");
-        lastNames.add("Keller");
-        lastNames.add("Ketler");
-        lastNames.add("Muller");
-        lastNames.add("Fuller");
-        lastNames.add("Smith");
-        lastNames.add("Smyth");
-        lastNames.add("Smithe");
-        lastNames.add("Smythe");
+		lastNames.add("Miller");
+		lastNames.add("Myller");
+		lastNames.add("Keller");
+		lastNames.add("Ketler");
+		lastNames.add("Muller");
+		lastNames.add("Fuller");
+		lastNames.add("Smith");
+		lastNames.add("Smyth");
+		lastNames.add("Smithe");
+		lastNames.add("Smythe");
 
-        // Generate all 'firstName lastName' combinations as data strings
-        for (String f : firstNames) {
-            for (String l : lastNames) {
-                dataStrings.add(f + " " + l);
-            }
-        }
-    }
+		// Generate all 'firstName lastName' combinations as data strings
+		for (String f : firstNames) {
+			for (String l : lastNames) {
+				dataStrings.add(f + " " + l);
+			}
+		}
+	}
 
-    private class TokenIdPair implements Comparable<TokenIdPair> {
-        public ByteArrayAccessibleOutputStream baaos = new ByteArrayAccessibleOutputStream();
-        public DataOutputStream dos = new DataOutputStream(baaos);
-        public int id;
+	private class TokenIdPair implements Comparable<TokenIdPair> {
+		public ByteArrayAccessibleOutputStream baaos = new ByteArrayAccessibleOutputStream();
+		public DataOutputStream dos = new DataOutputStream(baaos);
+		public int id;
 
-        TokenIdPair(IToken token, int id) throws IOException {
-            token.serializeToken(dos);
-            this.id = id;
-        }
+		TokenIdPair(IToken token, int id) throws IOException {
+			token.serializeToken(dos);
+			this.id = id;
+		}
 
-        @Override
-        public int compareTo(TokenIdPair o) {
-            int cmp = btreeBinCmps[0].compare(baaos.getByteArray(), 0, baaos.getByteArray().length,
-                    o.baaos.getByteArray(), 0, o.baaos.getByteArray().length);
-            if (cmp == 0) {
-                return id - o.id;
-            } else {
-                return cmp;
-            }
-        }
-    }
+		@Override
+		public int compareTo(TokenIdPair o) {
+			int cmp = btreeBinCmps[0].compare(baaos.getByteArray(), 0,
+					baaos.getByteArray().length, o.baaos.getByteArray(), 0,
+					o.baaos.getByteArray().length);
+			if (cmp == 0) {
+				return id - o.id;
+			} else {
+				return cmp;
+			}
+		}
+	}
 
-    public void loadData() throws IOException {
-        List<TokenIdPair> pairs = new ArrayList<TokenIdPair>();
-        // generate pairs for subsequent sorting and bulk-loading
-        int id = 0;
-        for (String s : dataStrings) {
-            ByteArrayAccessibleOutputStream baaos = new ByteArrayAccessibleOutputStream();
-            DataOutputStream dos = new DataOutputStream(baaos);
-            UTF8StringSerializerDeserializer.INSTANCE.serialize(s, dos);
-            tokenizer.reset(baaos.getByteArray(), 0, baaos.size());
-            int tokenCount = 0;
-            while (tokenizer.hasNext()) {
-                tokenizer.next();
-                IToken token = tokenizer.getToken();
-                pairs.add(new TokenIdPair(token, id));
-                ++tokenCount;
-            }
-            ++id;
-        }
-        Collections.sort(pairs);
+	public void loadData() throws IOException, TreeIndexException, PageAllocationException {
+		List<TokenIdPair> pairs = new ArrayList<TokenIdPair>();
+		// generate pairs for subsequent sorting and bulk-loading
+		int id = 0;
+		for (String s : dataStrings) {
+			ByteArrayAccessibleOutputStream baaos = new ByteArrayAccessibleOutputStream();
+			DataOutputStream dos = new DataOutputStream(baaos);
+			UTF8StringSerializerDeserializer.INSTANCE.serialize(s, dos);
+			tokenizer.reset(baaos.getByteArray(), 0, baaos.size());
+			int tokenCount = 0;
+			while (tokenizer.hasNext()) {
+				tokenizer.next();
+				IToken token = tokenizer.getToken();
+				pairs.add(new TokenIdPair(token, id));
+				++tokenCount;
+			}
+			++id;
+		}
+		Collections.sort(pairs);
 
-        // bulk load index
-        IInvertedListBuilder invListBuilder = new FixedSizeElementInvertedListBuilder(invListTypeTraits);
-        InvertedIndex.BulkLoadContext ctx = invIndex.beginBulkLoad(invListBuilder, HYRACKS_FRAME_SIZE,
-                BTree.DEFAULT_FILL_FACTOR);
+		// bulk load index
+		IInvertedListBuilder invListBuilder = new FixedSizeElementInvertedListBuilder(
+				invListTypeTraits);
+		InvertedIndex.BulkLoadContext ctx = invIndex.beginBulkLoad(
+				invListBuilder, HYRACKS_FRAME_SIZE, BTree.DEFAULT_FILL_FACTOR);
 
-        for (TokenIdPair t : pairs) {
-            tb.reset();
-            tb.addField(t.baaos.getByteArray(), 0, t.baaos.getByteArray().length);
-            IntegerSerializerDeserializer.INSTANCE.serialize(t.id, dos);
-            tb.addFieldEndOffset();
+		for (TokenIdPair t : pairs) {
+			tb.reset();
+			tb.addField(t.baaos.getByteArray(), 0,
+					t.baaos.getByteArray().length);
+			IntegerSerializerDeserializer.INSTANCE.serialize(t.id, dos);
+			tb.addFieldEndOffset();
 
-            appender.reset(frame, true);
-            appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+			appender.reset(frame, true);
+			appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0,
+					tb.getSize());
 
-            tuple.reset(accessor, 0);
+			tuple.reset(accessor, 0);
 
-            try {
-                invIndex.bulkLoadAddTuple(ctx, tuple);
-            } catch (Exception e) {
-                e.printStackTrace();
-            }
-        }
-        invIndex.endBulkLoad(ctx);
-    }
+			try {
+				invIndex.bulkLoadAddTuple(ctx, tuple);
+			} catch (Exception e) {
+				e.printStackTrace();
+			}
+		}
+		invIndex.endBulkLoad(ctx);
+	}
 
-    /**
-     * Runs a specified number of randomly picked strings from dataStrings as
-     * queries. We run each query, measure it's time, and print it's results.
-     * 
-     */
-    private void runQueries(IInvertedIndexSearchModifier searchModifier, int numQueries) throws Exception {
+	/**
+	 * Runs a specified number of randomly picked strings from dataStrings as
+	 * queries. We run each query, measure its time, and print its results.
+	 * 
+	 */
+	private void runQueries(IInvertedIndexSearchModifier searchModifier,
+			int numQueries) throws Exception {
 
-        rnd.setSeed(50);
+		rnd.setSeed(50);
 
-        for (int i = 0; i < numQueries; i++) {
+		for (int i = 0; i < numQueries; i++) {
 
-            int queryIndex = Math.abs(rnd.nextInt() % dataStrings.size());
-            String queryString = dataStrings.get(queryIndex);
+			int queryIndex = Math.abs(rnd.nextInt() % dataStrings.size());
+			String queryString = dataStrings.get(queryIndex);
 
-            queryTb.reset();
-            UTF8StringSerializerDeserializer.INSTANCE.serialize(queryString, queryDos);
-            queryTb.addFieldEndOffset();
+			queryTb.reset();
+			UTF8StringSerializerDeserializer.INSTANCE.serialize(queryString,
+					queryDos);
+			queryTb.addFieldEndOffset();
 
-            queryAppender.reset(frame, true);
-            queryAppender.append(queryTb.getFieldEndOffsets(), queryTb.getByteArray(), 0, queryTb.getSize());
-            queryTuple.reset(queryAccessor, 0);
+			queryAppender.reset(frame, true);
+			queryAppender.append(queryTb.getFieldEndOffsets(),
+					queryTb.getByteArray(), 0, queryTb.getSize());
+			queryTuple.reset(queryAccessor, 0);
 
-            int repeats = 1;
-            double totalTime = 0;
-            for (int j = 0; j < repeats; j++) {
-                long timeStart = System.currentTimeMillis();
-                try {
-                    searcher.reset();
-                    searcher.search(resultCursor, queryTuple, 0, searchModifier);
-                } catch (OccurrenceThresholdPanicException e) {
-                    // ignore panic queries
-                }
-                long timeEnd = System.currentTimeMillis();
-                totalTime += timeEnd - timeStart;
-            }
-            double avgTime = totalTime / (double) repeats;
-            StringBuilder strBuilder = new StringBuilder();
-            strBuilder.append(i + ": " + "\"" + queryString + "\": " + avgTime + "ms" + "\n");
-            strBuilder.append("CANDIDATE RESULTS:\n");
-            while (resultCursor.hasNext()) {
-                resultCursor.next();
-                ITupleReference resultTuple = resultCursor.getTuple();
-                int id = IntegerSerializerDeserializer
-                        .getInt(resultTuple.getFieldData(0), resultTuple.getFieldStart(0));
-                strBuilder.append(id + " " + dataStrings.get(id));
-                strBuilder.append('\n');
-            }
-            // remove trailing newline
-            strBuilder.deleteCharAt(strBuilder.length() - 1);
-            LOGGER.info(strBuilder.toString());
-        }
-    }
+			int repeats = 1;
+			double totalTime = 0;
+			for (int j = 0; j < repeats; j++) {
+				long timeStart = System.currentTimeMillis();
+				try {
+					searcher.reset();
+					searcher.search(resultCursor, queryTuple, 0, searchModifier);
+				} catch (OccurrenceThresholdPanicException e) {
+					// ignore panic queries
+				}
+				long timeEnd = System.currentTimeMillis();
+				totalTime += timeEnd - timeStart;
+			}
+			double avgTime = totalTime / (double) repeats;
+			StringBuilder strBuilder = new StringBuilder();
+			strBuilder.append(i + ": " + "\"" + queryString + "\": " + avgTime
+					+ "ms" + "\n");
+			strBuilder.append("CANDIDATE RESULTS:\n");
+			while (resultCursor.hasNext()) {
+				resultCursor.next();
+				ITupleReference resultTuple = resultCursor.getTuple();
+				int id = IntegerSerializerDeserializer.getInt(
+						resultTuple.getFieldData(0),
+						resultTuple.getFieldStart(0));
+				strBuilder.append(id + " " + dataStrings.get(id));
+				strBuilder.append('\n');
+			}
+			// remove trailing newline
+			strBuilder.deleteCharAt(strBuilder.length() - 1);
+			if (LOGGER.isLoggable(Level.INFO)) {
+				LOGGER.info(strBuilder.toString());
+			}
+		}
+	}
 
-    /**
-     * Runs 5 random conjunctive search queries to test the
-     * ConjunctiveSearchModifier.
-     * 
-     */
-    @Test
-    public void conjunctiveQueryTest() throws Exception {
-        IInvertedIndexSearchModifier searchModifier = new ConjunctiveSearchModifier();
-        runQueries(searchModifier, 5);
-    }
+	/**
+	 * Runs 5 random conjunctive search queries to test the
+	 * ConjunctiveSearchModifier.
+	 * 
+	 */
+	@Test
+	public void conjunctiveQueryTest() throws Exception {
+		IInvertedIndexSearchModifier searchModifier = new ConjunctiveSearchModifier();
+		runQueries(searchModifier, 5);
+	}
 
-    /**
-     * Runs 5 random jaccard-based search queries with thresholds 0.9, 0.8, 0.7.
-     * Tests the JaccardSearchModifier.
-     * 
-     */
-    @Test
-    public void jaccardQueryTest() throws Exception {
-        JaccardSearchModifier searchModifier = new JaccardSearchModifier(1.0f);
+	/**
+	 * Runs 5 random jaccard-based search queries with thresholds 0.9, 0.8, 0.7.
+	 * Tests the JaccardSearchModifier.
+	 * 
+	 */
+	@Test
+	public void jaccardQueryTest() throws Exception {
+		JaccardSearchModifier searchModifier = new JaccardSearchModifier(1.0f);
 
-        LOGGER.info("JACCARD: " + 0.9f);
-        searchModifier.setJaccThresh(0.9f);
-        runQueries(searchModifier, 5);
+		if (LOGGER.isLoggable(Level.INFO)) {
+			LOGGER.info("JACCARD: " + 0.9f);
+		}
+		searchModifier.setJaccThresh(0.9f);
+		runQueries(searchModifier, 5);
 
-        LOGGER.info("JACCARD: " + 0.8f);
-        searchModifier.setJaccThresh(0.8f);
-        runQueries(searchModifier, 5);
+		if (LOGGER.isLoggable(Level.INFO)) {
+			LOGGER.info("JACCARD: " + 0.8f);
+		}
+		searchModifier.setJaccThresh(0.8f);
+		runQueries(searchModifier, 5);
 
-        LOGGER.info("JACCARD: " + 0.7f);
-        searchModifier.setJaccThresh(0.7f);
-        runQueries(searchModifier, 5);
-    }
+		if (LOGGER.isLoggable(Level.INFO)) {
+			LOGGER.info("JACCARD: " + 0.7f);
+		}
+		searchModifier.setJaccThresh(0.7f);
+		runQueries(searchModifier, 5);
+	}
 
-    /**
-     * Runs 5 random edit-distance based search queries with thresholds 1, 2, 3.
-     * Tests the EditDistanceSearchModifier.
-     * 
-     */
-    @Test
-    public void editDistanceQueryTest() throws Exception {
-        EditDistanceSearchModifier searchModifier = new EditDistanceSearchModifier(3, 0);
+	/**
+	 * Runs 5 random edit-distance based search queries with thresholds 1, 2, 3.
+	 * Tests the EditDistanceSearchModifier.
+	 * 
+	 */
+	@Test
+	public void editDistanceQueryTest() throws Exception {
+		EditDistanceSearchModifier searchModifier = new EditDistanceSearchModifier(
+				3, 0);
 
-        LOGGER.info("EDIT DISTANCE: " + 1);
-        searchModifier.setEdThresh(1);
-        runQueries(searchModifier, 5);
+		if (LOGGER.isLoggable(Level.INFO)) {
+			LOGGER.info("EDIT DISTANCE: " + 1);
+		}
+		searchModifier.setEdThresh(1);
+		runQueries(searchModifier, 5);
 
-        LOGGER.info("EDIT DISTANCE: " + 2);
-        searchModifier.setEdThresh(2);
-        runQueries(searchModifier, 5);
+		if (LOGGER.isLoggable(Level.INFO)) {
+			LOGGER.info("EDIT DISTANCE: " + 2);
+		}
+		searchModifier.setEdThresh(2);
+		runQueries(searchModifier, 5);
 
-        LOGGER.info("EDIT DISTANCE: " + 3);
-        searchModifier.setEdThresh(3);
-        runQueries(searchModifier, 5);
-    }
+		if (LOGGER.isLoggable(Level.INFO)) {
+			LOGGER.info("EDIT DISTANCE: " + 3);
+		}
+		searchModifier.setEdThresh(3);
+		runQueries(searchModifier, 5);
+	}
 }
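Note: SearchTest.loadData() above tokenizes every data string into (token, id) pairs, sorts them, and bulk-loads the sorted stream into the inverted index. A minimal sketch of that sort-then-group preparation in plain Java — no Hyracks types; the class and tokenization are illustrative stand-ins, not the patch's API:

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;

    // Illustrative stand-in for the (token, id) sort that precedes inverted-index bulk loading.
    public class TokenIdPairSketch {
        static class Pair implements Comparable<Pair> {
            final String token;
            final int id;
            Pair(String token, int id) { this.token = token; this.id = id; }
            // Primary order: token; secondary order: document id (mirrors TokenIdPair.compareTo).
            public int compareTo(Pair o) {
                int cmp = token.compareTo(o.token);
                return cmp == 0 ? id - o.id : cmp;
            }
        }

        public static void main(String[] args) {
            String[] dataStrings = { "John Smith", "Jack Smyth" };
            List<Pair> pairs = new ArrayList<Pair>();
            for (int id = 0; id < dataStrings.length; id++) {
                // Whitespace tokenization stands in for the n-gram tokenizer used in the test.
                for (String token : dataStrings[id].toLowerCase().split("\\s+")) {
                    pairs.add(new Pair(token, id));
                }
            }
            Collections.sort(pairs);
            // Sorted (token, id) pairs are the order a bulk load expects: all ids for one
            // token arrive contiguously and in increasing order.
            for (Pair p : pairs) {
                System.out.println(p.token + " -> " + p.id);
            }
        }
    }
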
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/.classpath b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/.classpath
deleted file mode 100644
index f2cc5f7..0000000
--- a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/.classpath
+++ /dev/null
@@ -1,7 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
-	<classpathentry kind="src" output="target/test-classes" path="src/test/java"/>
-	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
-	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
-	<classpathentry kind="output" path="target/classes"/>
-</classpath>
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/.project b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/.project
deleted file mode 100644
index ea7e36b..0000000
--- a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
-	<name>hyracks-storage-am-rtree-test</name>
-	<comment></comment>
-	<projects>
-	</projects>
-	<buildSpec>
-		<buildCommand>
-			<name>org.eclipse.jdt.core.javabuilder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-		<buildCommand>
-			<name>org.maven.ide.eclipse.maven2Builder</name>
-			<arguments>
-			</arguments>
-		</buildCommand>
-	</buildSpec>
-	<natures>
-		<nature>org.eclipse.jdt.core.javanature</nature>
-		<nature>org.maven.ide.eclipse.maven2Nature</nature>
-	</natures>
-</projectDescription>
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/.settings/org.eclipse.jdt.core.prefs b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100644
index 375e12e..0000000
--- a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,6 +0,0 @@
-#Fri May 20 19:34:07 PDT 2011
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
-org.eclipse.jdt.core.compiler.compliance=1.6
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.6
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/.settings/org.maven.ide.eclipse.prefs b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/.settings/org.maven.ide.eclipse.prefs
deleted file mode 100644
index 99b89a6..0000000
--- a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/.settings/org.maven.ide.eclipse.prefs
+++ /dev/null
@@ -1,9 +0,0 @@
-#Thu Jan 06 11:27:16 PST 2011
-activeProfiles=
-eclipse.preferences.version=1
-fullBuildGoals=process-test-resources
-includeModules=false
-resolveWorkspaceProjects=true
-resourceFilterGoals=process-resources resources\:testResources
-skipCompilerPlugin=true
-version=1
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml
index cdde071..bd1b82e 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml
+++ b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml
@@ -2,12 +2,12 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>edu.uci.ics.hyracks</groupId>
   <artifactId>hyracks-storage-am-rtree-test</artifactId>
-  <version>0.1.9-SNAPSHOT</version>
+  <version>0.2.0-SNAPSHOT</version>
 
   <parent>
     <groupId>edu.uci.ics.hyracks</groupId>
     <artifactId>hyracks-tests</artifactId>
-    <version>0.1.9-SNAPSHOT</version>
+    <version>0.2.0-SNAPSHOT</version>
   </parent>
 
   <build>
@@ -34,20 +34,20 @@
   	<dependency>
   		<groupId>edu.uci.ics.hyracks</groupId>
   		<artifactId>hyracks-control-nc</artifactId>
-  		<version>0.1.9-SNAPSHOT</version>
+  		<version>0.2.0-SNAPSHOT</version>
   		<scope>compile</scope>
   	</dependency>
   	<dependency>
   		<groupId>edu.uci.ics.hyracks</groupId>
   		<artifactId>hyracks-storage-am-rtree</artifactId>
-  		<version>0.1.9-SNAPSHOT</version>
+  		<version>0.2.0-SNAPSHOT</version>
   		<type>jar</type>
   		<scope>compile</scope>
   	</dependency>
   	<dependency>
   		<groupId>edu.uci.ics.hyracks</groupId>
   		<artifactId>hyracks-test-support</artifactId>
-  		<version>0.1.9-SNAPSHOT</version>
+  		<version>0.2.0-SNAPSHOT</version>
   		<type>jar</type>
   		<scope>test</scope>
   	</dependency>
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTest.java b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTest.java
index b2ab775..8f619a4 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTest.java
@@ -18,23 +18,23 @@
 import java.io.File;
 import java.text.SimpleDateFormat;
 import java.util.Date;
+import java.util.logging.Logger;
 
 import org.junit.AfterClass;
 
 public abstract class AbstractRTreeTest {
 
-    protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
-    protected final static String tmpDir = System.getProperty("java.io.tmpdir");
-    protected final static String sep = System.getProperty("file.separator");
-    protected final static String fileName = tmpDir + sep + simpleDateFormat.format(new Date());
+	protected static final Logger LOGGER = Logger.getLogger(AbstractRTreeTest.class.getName());
+	protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat(
+			"ddMMyy-hhmmssSS");
+	protected final static String tmpDir = System.getProperty("java.io.tmpdir");
+	protected final static String sep = System.getProperty("file.separator");
+	protected final static String fileName = tmpDir + sep
+			+ simpleDateFormat.format(new Date());
 
-    protected void print(String str) {
-        System.err.print(str);
-    }
-
-    @AfterClass
-    public static void cleanup() throws Exception {
-        File f = new File(fileName);
-        f.deleteOnExit();
-    }
+	@AfterClass
+	public static void cleanup() throws Exception {
+		File f = new File(fileName);
+		f.deleteOnExit();
+	}
 }
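Note: the change above (and throughout this patch) replaces direct System.err printing with java.util.logging behind an isLoggable guard, so log strings are only concatenated when INFO is enabled. A minimal, self-contained sketch of that pattern — the class name is illustrative:

    import java.util.logging.Level;
    import java.util.logging.Logger;

    // Minimal illustration of the guarded-logging pattern adopted in these tests.
    public class GuardedLoggingSketch {
        private static final Logger LOGGER = Logger.getLogger(GuardedLoggingSketch.class.getName());

        public static void main(String[] args) {
            for (int i = 0; i < 5000; i++) {
                // The guard avoids the cost of building the message when INFO logging is disabled.
                if (LOGGER.isLoggable(Level.INFO)) {
                    if (i % 1000 == 0) {
                        LOGGER.info("INSERTING " + i);
                    }
                }
            }
        }
    }
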
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTest.java b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTest.java
index 9659553..9fe2c94 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTest.java
@@ -19,28 +19,31 @@
 import java.io.File;
 import java.nio.ByteBuffer;
 import java.util.Random;
+import java.util.logging.Level;
 
 import org.junit.Test;
 
 import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.TypeTrait;
 import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.comparators.DoubleBinaryComparatorFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
 import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
@@ -48,30 +51,26 @@
 import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
 import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.common.utility.TreeIndexStats;
-import edu.uci.ics.hyracks.storage.am.common.utility.TreeIndexStatsGatherer;
+import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexStats;
+import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexStatsGatherer;
 import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeFrame;
 import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.DoublePrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.IntegerPrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeOpContext;
 import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
 import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
 import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
 import edu.uci.ics.hyracks.test.support.TestUtils;
 
 public class RTreeTest extends AbstractRTreeTest {
-
     private static final int PAGE_SIZE = 256;
     private static final int NUM_PAGES = 10;
     private static final int MAX_OPEN_FILES = 10;
     private static final int HYRACKS_FRAME_SIZE = 128;
-    private IHyracksStageletContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
+    private IHyracksTaskContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
 
     // create an R-tree of two dimensions
     // fill the R-tree with random values using insertions
@@ -90,36 +89,34 @@
         // declare keys
         int keyFieldCount = 4;
         IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
-        cmps[0] = DoubleBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        cmps[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY).createBinaryComparator();
         cmps[1] = cmps[0];
         cmps[2] = cmps[0];
         cmps[3] = cmps[0];
 
         // declare tuple fields
         int fieldCount = 7;
-        ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
-        typeTraits[0] = new TypeTrait(8);
-        typeTraits[1] = new TypeTrait(8);
-        typeTraits[2] = new TypeTrait(8);
-        typeTraits[3] = new TypeTrait(8);
-        typeTraits[4] = new TypeTrait(8);
-        typeTraits[5] = new TypeTrait(4);
-        typeTraits[6] = new TypeTrait(8);
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = DoublePointable.TYPE_TRAITS;
+        typeTraits[1] = DoublePointable.TYPE_TRAITS;
+        typeTraits[2] = DoublePointable.TYPE_TRAITS;
+        typeTraits[3] = DoublePointable.TYPE_TRAITS;
+        typeTraits[4] = DoublePointable.TYPE_TRAITS;
+        typeTraits[5] = DoublePointable.TYPE_TRAITS;
+        typeTraits[6] = DoublePointable.TYPE_TRAITS;
 
-        // declare value providers
-        IPrimitiveValueProvider[] valueProviders = new IPrimitiveValueProvider[keyFieldCount];
-        valueProviders[0] = DoublePrimitiveValueProviderFactory.INSTANCE.createPrimitiveValueProvider();
-        valueProviders[1] = valueProviders[0];
-        valueProviders[2] = valueProviders[0];
-        valueProviders[3] = valueProviders[0];
+        // create value providers
+        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+                cmps.length, DoublePointable.FACTORY);
 
-        MultiComparator cmp = new MultiComparator(typeTraits, cmps, valueProviders);
+        MultiComparator cmp = new MultiComparator(cmps);
 
         RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(typeTraits);
 
         ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(tupleWriterFactory,
-                keyFieldCount);
-        ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory, keyFieldCount);
+                valueProviderFactories);
+        ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory,
+                valueProviderFactories);
         ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
         ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
 
@@ -127,13 +124,13 @@
         IRTreeFrame leafFrame = (IRTreeFrame) leafFrameFactory.createFrame();
         IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
 
-        RTree rtree = new RTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
-        rtree.create(fileId, leafFrame, metaFrame);
+        RTree rtree = new RTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
+        rtree.create(fileId);
         rtree.open(fileId);
 
         ByteBuffer hyracksFrame = ctx.allocateFrame();
         FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
         DataOutput dos = tb.getDataOutput();
 
         @SuppressWarnings("rawtypes")
@@ -146,7 +143,7 @@
         accessor.reset(hyracksFrame);
         FrameTupleReference tuple = new FrameTupleReference();
 
-        RTreeOpContext insertOpCtx = rtree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
+        ITreeIndexAccessor indexAccessor = rtree.createAccessor();
 
         Random rnd = new Random();
         rnd.setSeed(50);
@@ -185,36 +182,40 @@
 
             tuple.reset(accessor, 0);
 
-            if (i % 1000 == 0) {
-                print("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " " + Math.max(p1x, p2x)
-                        + " " + Math.max(p1y, p2y) + "\n");
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if (i % 1000 == 0) {
+                    LOGGER.info("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " "
+                            + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y));
+                }
             }
 
             try {
-                rtree.insert(tuple, insertOpCtx);
+                indexAccessor.insert(tuple);
             } catch (TreeIndexException e) {
             } catch (Exception e) {
                 e.printStackTrace();
             }
         }
 
-        // rtree.printTree(leafFrame, interiorFrame, recDescSers);
-        // System.err.println();
-
         String rtreeStats = rtree.printStats();
-        print(rtreeStats);
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info(rtreeStats);
+        }
 
         // disk-order scan
-        print("DISK-ORDER SCAN:\n");
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("DISK-ORDER SCAN:");
+        }
         TreeDiskOrderScanCursor diskOrderCursor = new TreeDiskOrderScanCursor(leafFrame);
-        RTreeOpContext diskOrderScanOpCtx = rtree.createOpContext(IndexOp.DISKORDERSCAN, leafFrame, null, null);
-        rtree.diskOrderScan(diskOrderCursor, leafFrame, metaFrame, diskOrderScanOpCtx);
+        indexAccessor.diskOrderScan(diskOrderCursor);
         try {
             while (diskOrderCursor.hasNext()) {
                 diskOrderCursor.next();
                 ITupleReference frameTuple = diskOrderCursor.getTuple();
-                String rec = cmp.printTuple(frameTuple, recDescSers);
-                print(rec + "\n");
+                String rec = TupleUtils.printTuple(frameTuple, recDescSers);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info(rec);
+                }
             }
         } catch (Exception e) {
             e.printStackTrace();
@@ -226,7 +227,9 @@
                 rtree.getRootPageId());
         TreeIndexStats stats = statsGatherer.gatherStats(leafFrame, interiorFrame, metaFrame);
         String string = stats.toString();
-        System.err.println(string);
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info(string);
+        }
 
         rtree.close();
         bufferCache.closeFile(fileId);
@@ -251,36 +254,34 @@
         // declare keys
         int keyFieldCount = 4;
         IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
-        cmps[0] = DoubleBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        cmps[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY).createBinaryComparator();
         cmps[1] = cmps[0];
         cmps[2] = cmps[0];
         cmps[3] = cmps[0];
 
         // declare tuple fields
         int fieldCount = 7;
-        ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
-        typeTraits[0] = new TypeTrait(8);
-        typeTraits[1] = new TypeTrait(8);
-        typeTraits[2] = new TypeTrait(8);
-        typeTraits[3] = new TypeTrait(8);
-        typeTraits[4] = new TypeTrait(8);
-        typeTraits[5] = new TypeTrait(4);
-        typeTraits[6] = new TypeTrait(8);
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = DoublePointable.TYPE_TRAITS;
+        typeTraits[1] = DoublePointable.TYPE_TRAITS;
+        typeTraits[2] = DoublePointable.TYPE_TRAITS;
+        typeTraits[3] = DoublePointable.TYPE_TRAITS;
+        typeTraits[4] = DoublePointable.TYPE_TRAITS;
+        typeTraits[5] = DoublePointable.TYPE_TRAITS;
+        typeTraits[6] = DoublePointable.TYPE_TRAITS;
 
-        // declare value providers
-        IPrimitiveValueProvider[] valueProviders = new IPrimitiveValueProvider[keyFieldCount];
-        valueProviders[0] = DoublePrimitiveValueProviderFactory.INSTANCE.createPrimitiveValueProvider();
-        valueProviders[1] = valueProviders[0];
-        valueProviders[2] = valueProviders[0];
-        valueProviders[3] = valueProviders[0];
+        // create value providers
+        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+                cmps.length, DoublePointable.FACTORY);
 
-        MultiComparator cmp = new MultiComparator(typeTraits, cmps, valueProviders);
+        MultiComparator cmp = new MultiComparator(cmps);
 
         RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(typeTraits);
 
         ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(tupleWriterFactory,
-                keyFieldCount);
-        ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory, keyFieldCount);
+                valueProviderFactories);
+        ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory,
+                valueProviderFactories);
         ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
         ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
 
@@ -288,13 +289,13 @@
         IRTreeFrame leafFrame = (IRTreeFrame) leafFrameFactory.createFrame();
         IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
 
-        RTree rtree = new RTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
-        rtree.create(fileId, leafFrame, metaFrame);
+        RTree rtree = new RTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
+        rtree.create(fileId);
         rtree.open(fileId);
 
         ByteBuffer hyracksFrame = ctx.allocateFrame();
         FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
         DataOutput dos = tb.getDataOutput();
 
         @SuppressWarnings("rawtypes")
@@ -307,7 +308,7 @@
         accessor.reset(hyracksFrame);
         FrameTupleReference tuple = new FrameTupleReference();
 
-        RTreeOpContext insertOpCtx = rtree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
+        ITreeIndexAccessor indexAccessor = rtree.createAccessor();
 
         Random rnd = new Random();
         rnd.setSeed(50);
@@ -344,26 +345,26 @@
 
             tuple.reset(accessor, 0);
 
-            if (i % 1000 == 0) {
-                print("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " " + Math.max(p1x, p2x)
-                        + " " + Math.max(p1y, p2y) + "\n");
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if (i % 1000 == 0) {
+                    LOGGER.info("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " "
+                            + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y));
+                }
             }
 
             try {
-                rtree.insert(tuple, insertOpCtx);
+                indexAccessor.insert(tuple);
             } catch (TreeIndexException e) {
             } catch (Exception e) {
                 e.printStackTrace();
             }
         }
 
-        // rtree.printTree(leafFrame, interiorFrame, recDescSers);
-        // System.err.println();
-
         String rtreeStats = rtree.printStats();
-        print(rtreeStats);
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info(rtreeStats);
+        }
 
-        RTreeOpContext deleteOpCtx = rtree.createOpContext(IndexOp.DELETE, leafFrame, interiorFrame, metaFrame);
         rnd.setSeed(50);
         for (int i = 0; i < 5000; i++) {
 
@@ -397,14 +398,15 @@
 
             tuple.reset(accessor, 0);
 
-            if (i % 1000 == 0) {
-                print("DELETING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " " + Math.max(p1x, p2x)
-                        + " " + Math.max(p1y, p2y) + "\n");
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if (i % 1000 == 0) {
+                    LOGGER.info("DELETING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " "
+                            + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y));
+                }
             }
 
             try {
-                rtree.delete(tuple, deleteOpCtx);
-
+                indexAccessor.delete(tuple);
             } catch (TreeIndexException e) {
             } catch (Exception e) {
                 e.printStackTrace();
@@ -415,7 +417,9 @@
                 rtree.getRootPageId());
         TreeIndexStats stats = statsGatherer.gatherStats(leafFrame, interiorFrame, metaFrame);
         String string = stats.toString();
-        System.err.println(string);
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info(string);
+        }
 
         rtree.close();
         bufferCache.closeFile(fileId);
@@ -440,7 +444,7 @@
         // declare keys
         int keyFieldCount = 6;
         IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
-        cmps[0] = DoubleBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        cmps[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY).createBinaryComparator();
         cmps[1] = cmps[0];
         cmps[2] = cmps[0];
         cmps[3] = cmps[0];
@@ -449,33 +453,29 @@
 
         // declare tuple fields
         int fieldCount = 9;
-        ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
-        typeTraits[0] = new TypeTrait(8);
-        typeTraits[1] = new TypeTrait(8);
-        typeTraits[2] = new TypeTrait(8);
-        typeTraits[3] = new TypeTrait(8);
-        typeTraits[4] = new TypeTrait(8);
-        typeTraits[5] = new TypeTrait(8);
-        typeTraits[6] = new TypeTrait(8);
-        typeTraits[7] = new TypeTrait(4);
-        typeTraits[8] = new TypeTrait(8);
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = DoublePointable.TYPE_TRAITS;
+        typeTraits[1] = DoublePointable.TYPE_TRAITS;
+        typeTraits[2] = DoublePointable.TYPE_TRAITS;
+        typeTraits[3] = DoublePointable.TYPE_TRAITS;
+        typeTraits[4] = DoublePointable.TYPE_TRAITS;
+        typeTraits[5] = DoublePointable.TYPE_TRAITS;
+        typeTraits[6] = DoublePointable.TYPE_TRAITS;
+        typeTraits[7] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[8] = DoublePointable.TYPE_TRAITS;
 
-        // declare value providers
-        IPrimitiveValueProvider[] valueProviders = new IPrimitiveValueProvider[keyFieldCount];
-        valueProviders[0] = DoublePrimitiveValueProviderFactory.INSTANCE.createPrimitiveValueProvider();
-        valueProviders[1] = valueProviders[0];
-        valueProviders[2] = valueProviders[0];
-        valueProviders[3] = valueProviders[0];
-        valueProviders[4] = valueProviders[0];
-        valueProviders[5] = valueProviders[0];
+        // create value providers
+        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+                cmps.length, DoublePointable.FACTORY);
 
-        MultiComparator cmp = new MultiComparator(typeTraits, cmps, valueProviders);
+        MultiComparator cmp = new MultiComparator(cmps);
 
         RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(typeTraits);
 
         ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(tupleWriterFactory,
-                keyFieldCount);
-        ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory, keyFieldCount);
+                valueProviderFactories);
+        ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory,
+                valueProviderFactories);
         ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
         ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
 
@@ -483,13 +483,13 @@
         IRTreeFrame leafFrame = (IRTreeFrame) leafFrameFactory.createFrame();
         IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
 
-        RTree rtree = new RTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
-        rtree.create(fileId, leafFrame, metaFrame);
+        RTree rtree = new RTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
+        rtree.create(fileId);
         rtree.open(fileId);
 
         ByteBuffer hyracksFrame = ctx.allocateFrame();
         FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
         DataOutput dos = tb.getDataOutput();
 
         @SuppressWarnings("rawtypes")
@@ -503,7 +503,7 @@
         accessor.reset(hyracksFrame);
         FrameTupleReference tuple = new FrameTupleReference();
 
-        RTreeOpContext insertOpCtx = rtree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
+        ITreeIndexAccessor indexAccessor = rtree.createAccessor();
 
         Random rnd = new Random();
         rnd.setSeed(50);
@@ -546,36 +546,41 @@
 
             tuple.reset(accessor, 0);
 
-            if (i % 1000 == 0) {
-                print("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " " + Math.min(p1z, p2z)
-                        + " " + " " + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y) + " " + Math.max(p1z, p2z) + "\n");
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if (i % 1000 == 0) {
+                    LOGGER.info("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " "
+                            + Math.min(p1z, p2z) + " " + " " + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y) + " "
+                            + Math.max(p1z, p2z));
+                }
             }
 
             try {
-                rtree.insert(tuple, insertOpCtx);
+                indexAccessor.insert(tuple);
             } catch (TreeIndexException e) {
             } catch (Exception e) {
                 e.printStackTrace();
             }
         }
 
-        // rtree.printTree(leafFrame, interiorFrame, recDescSers);
-        // System.err.println();
-
         String rtreeStats = rtree.printStats();
-        print(rtreeStats);
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info(rtreeStats);
+        }
 
         // disk-order scan
-        print("DISK-ORDER SCAN:\n");
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("DISK-ORDER SCAN:");
+        }
         TreeDiskOrderScanCursor diskOrderCursor = new TreeDiskOrderScanCursor(leafFrame);
-        RTreeOpContext diskOrderScanOpCtx = rtree.createOpContext(IndexOp.DISKORDERSCAN, leafFrame, null, null);
-        rtree.diskOrderScan(diskOrderCursor, leafFrame, metaFrame, diskOrderScanOpCtx);
+        indexAccessor.diskOrderScan(diskOrderCursor);
         try {
             while (diskOrderCursor.hasNext()) {
                 diskOrderCursor.next();
                 ITupleReference frameTuple = diskOrderCursor.getTuple();
-                String rec = cmp.printTuple(frameTuple, recDescSers);
-                print(rec + "\n");
+                String rec = TupleUtils.printTuple(frameTuple, recDescSers);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info(rec);
+                }
             }
         } catch (Exception e) {
             e.printStackTrace();
@@ -587,7 +592,9 @@
                 rtree.getRootPageId());
         TreeIndexStats stats = statsGatherer.gatherStats(leafFrame, interiorFrame, metaFrame);
         String string = stats.toString();
-        System.err.println(string);
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info(string);
+        }
 
         rtree.close();
         bufferCache.closeFile(fileId);
@@ -612,36 +619,34 @@
         // declare keys
         int keyFieldCount = 4;
         IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
-        cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        cmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
         cmps[1] = cmps[0];
         cmps[2] = cmps[0];
         cmps[3] = cmps[0];
 
         // declare tuple fields
         int fieldCount = 7;
-        ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
-        typeTraits[0] = new TypeTrait(4);
-        typeTraits[1] = new TypeTrait(4);
-        typeTraits[2] = new TypeTrait(4);
-        typeTraits[3] = new TypeTrait(4);
-        typeTraits[4] = new TypeTrait(8);
-        typeTraits[5] = new TypeTrait(4);
-        typeTraits[6] = new TypeTrait(8);
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[2] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[3] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[4] = DoublePointable.TYPE_TRAITS;
+        typeTraits[5] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[6] = DoublePointable.TYPE_TRAITS;
 
-        // declare value providers
-        IPrimitiveValueProvider[] valueProviders = new IPrimitiveValueProvider[keyFieldCount];
-        valueProviders[0] = IntegerPrimitiveValueProviderFactory.INSTANCE.createPrimitiveValueProvider();
-        valueProviders[1] = valueProviders[0];
-        valueProviders[2] = valueProviders[0];
-        valueProviders[3] = valueProviders[0];
+        // create value providers
+        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+                cmps.length, IntegerPointable.FACTORY);
 
-        MultiComparator cmp = new MultiComparator(typeTraits, cmps, valueProviders);
+        MultiComparator cmp = new MultiComparator(cmps);
 
         RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(typeTraits);
 
         ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(tupleWriterFactory,
-                keyFieldCount);
-        ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory, keyFieldCount);
+                valueProviderFactories);
+        ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory,
+                valueProviderFactories);
         ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
         ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
 
@@ -649,13 +654,13 @@
         IRTreeFrame leafFrame = (IRTreeFrame) leafFrameFactory.createFrame();
         IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
 
-        RTree rtree = new RTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
-        rtree.create(fileId, leafFrame, metaFrame);
+        RTree rtree = new RTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
+        rtree.create(fileId);
         rtree.open(fileId);
 
         ByteBuffer hyracksFrame = ctx.allocateFrame();
         FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
         DataOutput dos = tb.getDataOutput();
 
         @SuppressWarnings("rawtypes")
@@ -668,7 +673,7 @@
         accessor.reset(hyracksFrame);
         FrameTupleReference tuple = new FrameTupleReference();
 
-        RTreeOpContext insertOpCtx = rtree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
+        ITreeIndexAccessor indexAccessor = rtree.createAccessor();
 
         Random rnd = new Random();
         rnd.setSeed(50);
@@ -707,36 +712,40 @@
 
             tuple.reset(accessor, 0);
 
-            if (i % 1000 == 0) {
-                print("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " " + Math.max(p1x, p2x)
-                        + " " + Math.max(p1y, p2y) + "\n");
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if (i % 1000 == 0) {
+                    LOGGER.info("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " "
+                            + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y));
+                }
             }
 
             try {
-                rtree.insert(tuple, insertOpCtx);
+                indexAccessor.insert(tuple);
             } catch (TreeIndexException e) {
             } catch (Exception e) {
                 e.printStackTrace();
             }
         }
 
-        // rtree.printTree(leafFrame, interiorFrame, recDescSers);
-        // System.err.println();
-
         String rtreeStats = rtree.printStats();
-        print(rtreeStats);
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info(rtreeStats);
+        }
 
         // disk-order scan
-        print("DISK-ORDER SCAN:\n");
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("DISK-ORDER SCAN:");
+        }
         TreeDiskOrderScanCursor diskOrderCursor = new TreeDiskOrderScanCursor(leafFrame);
-        RTreeOpContext diskOrderScanOpCtx = rtree.createOpContext(IndexOp.DISKORDERSCAN, leafFrame, null, null);
-        rtree.diskOrderScan(diskOrderCursor, leafFrame, metaFrame, diskOrderScanOpCtx);
+        indexAccessor.diskOrderScan(diskOrderCursor);
         try {
             while (diskOrderCursor.hasNext()) {
                 diskOrderCursor.next();
                 ITupleReference frameTuple = diskOrderCursor.getTuple();
-                String rec = cmp.printTuple(frameTuple, recDescSers);
-                print(rec + "\n");
+                String rec = TupleUtils.printTuple(frameTuple, recDescSers);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info(rec);
+                }
             }
         } catch (Exception e) {
             e.printStackTrace();
@@ -748,11 +757,13 @@
                 rtree.getRootPageId());
         TreeIndexStats stats = statsGatherer.gatherStats(leafFrame, interiorFrame, metaFrame);
         String string = stats.toString();
-        System.err.println(string);
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info(string);
+        }
 
         rtree.close();
         bufferCache.closeFile(fileId);
         bufferCache.close();
 
     }
-}
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/SearchCursorTest.java b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/SearchCursorTest.java
index 68db811..a232e37 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/SearchCursorTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/SearchCursorTest.java
@@ -23,27 +23,30 @@
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.Random;
+import java.util.logging.Level;
 
 import org.junit.Test;
 
 import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.TypeTrait;
 import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.comparators.DoubleBinaryComparatorFactory;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
 import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
@@ -51,18 +54,16 @@
 import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
 import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
 import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
 import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
 import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.DoublePrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeOpContext;
 import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeSearchCursor;
 import edu.uci.ics.hyracks.storage.am.rtree.impls.SearchPredicate;
 import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
 import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
 import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
 import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
@@ -73,7 +74,7 @@
     private static final int NUM_PAGES = 10;
     private static final int MAX_OPEN_FILES = 10;
     private static final int HYRACKS_FRAME_SIZE = 128;
-    private IHyracksStageletContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
+    private IHyracksTaskContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
 
     // create an R-tree of two dimensions
     // fill the R-tree with random values using insertions
@@ -92,34 +93,32 @@
         // declare keys
         int keyFieldCount = 4;
         IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
-        cmps[0] = DoubleBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        cmps[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY).createBinaryComparator();
         cmps[1] = cmps[0];
         cmps[2] = cmps[0];
         cmps[3] = cmps[0];
 
         // declare tuple fields
         int fieldCount = 5;
-        ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
-        typeTraits[0] = new TypeTrait(8);
-        typeTraits[1] = new TypeTrait(8);
-        typeTraits[2] = new TypeTrait(8);
-        typeTraits[3] = new TypeTrait(8);
-        typeTraits[4] = new TypeTrait(4);
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = DoublePointable.TYPE_TRAITS;
+        typeTraits[1] = DoublePointable.TYPE_TRAITS;
+        typeTraits[2] = DoublePointable.TYPE_TRAITS;
+        typeTraits[3] = DoublePointable.TYPE_TRAITS;
+        typeTraits[4] = IntegerPointable.TYPE_TRAITS;
 
-        // declare value providers
-        IPrimitiveValueProvider[] valueProviders = new IPrimitiveValueProvider[keyFieldCount];
-        valueProviders[0] = DoublePrimitiveValueProviderFactory.INSTANCE.createPrimitiveValueProvider();
-        valueProviders[1] = valueProviders[0];
-        valueProviders[2] = valueProviders[0];
-        valueProviders[3] = valueProviders[0];
+        // create value providers
+        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+                cmps.length, DoublePointable.FACTORY);
 
-        MultiComparator cmp = new MultiComparator(typeTraits, cmps, valueProviders);
+        MultiComparator cmp = new MultiComparator(cmps);
 
         RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(typeTraits);
 
         ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(tupleWriterFactory,
-                keyFieldCount);
-        ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory, keyFieldCount);
+                valueProviderFactories);
+        ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory,
+                valueProviderFactories);
         ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
         ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
 
@@ -127,13 +126,13 @@
         IRTreeLeafFrame leafFrame = (IRTreeLeafFrame) leafFrameFactory.createFrame();
         IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
 
-        RTree rtree = new RTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
-        rtree.create(fileId, leafFrame, metaFrame);
+        RTree rtree = new RTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
+        rtree.create(fileId);
         rtree.open(fileId);
 
         ByteBuffer hyracksFrame = ctx.allocateFrame();
         FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
-        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
         DataOutput dos = tb.getDataOutput();
 
         @SuppressWarnings("rawtypes")
@@ -145,7 +144,7 @@
         accessor.reset(hyracksFrame);
         FrameTupleReference tuple = new FrameTupleReference();
 
-        RTreeOpContext insertOpCtx = rtree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
+        ITreeIndexAccessor indexAccessor = rtree.createAccessor();
 
         Random rnd = new Random();
         rnd.setSeed(50);
@@ -175,13 +174,15 @@
 
             tuple.reset(accessor, 0);
 
-            if (i % 1000 == 0) {
-                print("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " " + Math.max(p1x, p2x)
-                        + " " + Math.max(p1y, p2y) + "\n");
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if (i % 1000 == 0) {
+                    LOGGER.info("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " "
+                            + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y));
+                }
             }
 
             try {
-                rtree.insert(tuple, insertOpCtx);
+                indexAccessor.insert(tuple);
             } catch (TreeIndexException e) {
             } catch (Exception e) {
                 e.printStackTrace();
@@ -213,14 +214,15 @@
 
             tuple.reset(accessor, 0);
 
-            print(i + " Searching for: " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " " + Math.max(p1x, p2x)
-                    + " " + Math.max(p1y, p2y) + "\n");
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info(i + " Searching for: " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " "
+                        + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y));
+            }
 
             ITreeIndexCursor searchCursor = new RTreeSearchCursor(interiorFrame, leafFrame);
             SearchPredicate searchPredicate = new SearchPredicate(tuple, cmp);
 
-            RTreeOpContext searchOpCtx = rtree.createOpContext(IndexOp.SEARCH, leafFrame, interiorFrame, metaFrame);
-            rtree.search(searchCursor, searchPredicate, searchOpCtx);
+            indexAccessor.search(searchCursor, searchPredicate);
 
             ArrayList<Integer> results = new ArrayList<Integer>();
             try {
@@ -238,12 +240,13 @@
             } finally {
                 searchCursor.close();
             }
-
-            System.err.println("There are " + results.size() + " objects that satisfy the query");
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("There are " + results.size() + " objects that satisfy the query");
+            }
         }
 
         rtree.close();
         bufferCache.closeFile(fileId);
         bufferCache.close();
     }
-}
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-tests/hyracks-storage-common-test/pom.xml b/hyracks/hyracks-tests/hyracks-storage-common-test/pom.xml
index 50bd3bc..cbe34ef 100644
--- a/hyracks/hyracks-tests/hyracks-storage-common-test/pom.xml
+++ b/hyracks/hyracks-tests/hyracks-storage-common-test/pom.xml
@@ -2,12 +2,12 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>edu.uci.ics.hyracks</groupId>
   <artifactId>hyracks-storage-common-test</artifactId>
-  <version>0.1.9-SNAPSHOT</version>
+  <version>0.2.0-SNAPSHOT</version>
 
   <parent>
     <groupId>edu.uci.ics.hyracks</groupId>
     <artifactId>hyracks-tests</artifactId>
-    <version>0.1.9-SNAPSHOT</version>
+    <version>0.2.0-SNAPSHOT</version>
   </parent>
 
   <build>
@@ -34,19 +34,19 @@
   	<dependency>
   		<groupId>edu.uci.ics.hyracks</groupId>
   		<artifactId>hyracks-control-nc</artifactId>
-  		<version>0.1.9-SNAPSHOT</version>
+  		<version>0.2.0-SNAPSHOT</version>
   		<scope>compile</scope>
   	</dependency>
   	<dependency>
   		<groupId>edu.uci.ics.hyracks</groupId>
   		<artifactId>hyracks-storage-common</artifactId>
-  		<version>0.1.9-SNAPSHOT</version>
+  		<version>0.2.0-SNAPSHOT</version>
   		<scope>compile</scope>
   	</dependency>
   	<dependency>
   		<groupId>edu.uci.ics.hyracks</groupId>
   		<artifactId>hyracks-test-support</artifactId>
-  		<version>0.1.9-SNAPSHOT</version>
+  		<version>0.2.0-SNAPSHOT</version>
   		<type>jar</type>
   		<scope>test</scope>
   	</dependency>
diff --git a/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/edu/uci/ics/hyracks/storage/common/BufferCacheRegressionTests.java b/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/edu/uci/ics/hyracks/storage/common/BufferCacheRegressionTests.java
new file mode 100644
index 0000000..fff9e31
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/edu/uci/ics/hyracks/storage/common/BufferCacheRegressionTests.java
@@ -0,0 +1,180 @@
+package edu.uci.ics.hyracks.storage.common;
+
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileHandle;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.api.io.IIOManager;
+import edu.uci.ics.hyracks.api.io.IIOManager.FileReadWriteMode;
+import edu.uci.ics.hyracks.api.io.IIOManager.FileSyncMode;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestUtils;
+
+public class BufferCacheRegressionTests {
+	protected static final String tmpDir = System.getProperty("java.io.tmpdir");
+	protected static final String sep = System.getProperty("file.separator");
+
+	protected String fileName = tmpDir + sep + "flushTestFile";
+
+	private static final int PAGE_SIZE = 256;
+	private static final int HYRACKS_FRAME_SIZE = PAGE_SIZE;
+	private IHyracksTaskContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
+
+	// We want to test the following behavior when reclaiming a file slot in the
+	// buffer cache:
+	// 1. If the file being evicted was deleted, then its dirty pages should be
+	// invalidated, but must not be flushed.
+	// 2. If the file was not deleted, then we must flush its dirty pages.
+	@Test
+	public void testFlushBehaviorOnFileEviction() throws IOException {
+		File f = new File(fileName);
+		if (f.exists()) {
+			f.delete();
+		}
+		flushBehaviorTest(true);
+		flushBehaviorTest(false);
+	}
+
+	private void flushBehaviorTest(boolean deleteFile) throws IOException {
+		TestStorageManagerComponentHolder.init(PAGE_SIZE, 10, 1);
+
+		IBufferCache bufferCache = TestStorageManagerComponentHolder
+				.getBufferCache(ctx);
+		IFileMapProvider fmp = TestStorageManagerComponentHolder
+				.getFileMapProvider(ctx);
+
+		FileReference firstFileRef = new FileReference(new File(fileName));
+		bufferCache.createFile(firstFileRef);
+		int firstFileId = fmp.lookupFileId(firstFileRef);
+		bufferCache.openFile(firstFileId);
+
+		// Fill the first page with known data and make it dirty by write
+		// latching it.
+		ICachedPage writePage = bufferCache.pin(
+				BufferedFileHandle.getDiskPageId(firstFileId, 0), true);
+		writePage.acquireWriteLatch();
+		try {
+			ByteBuffer buf = writePage.getBuffer();
+			for (int i = 0; i < buf.capacity(); i++) {
+				buf.put(Byte.MAX_VALUE);
+			}
+		} finally {
+			writePage.releaseWriteLatch();
+			bufferCache.unpin(writePage);
+		}
+		bufferCache.closeFile(firstFileId);
+		if (deleteFile) {
+			bufferCache.deleteFile(firstFileId, false);
+		}
+
+		// Create a file with the same name.
+		FileReference secondFileRef = new FileReference(new File(fileName));
+		bufferCache.createFile(secondFileRef);
+		int secondFileId = fmp.lookupFileId(secondFileRef);
+
+		// This open will replace the firstFileRef's slot in the BufferCache,
+		// causing its pages to be cleaned up. We want to make sure that those
+		// dirty pages are not flushed to disk, because the file was declared
+		// deleted, and somebody might already be using the same filename again
+		// (having been assigned a different fileId).
+		bufferCache.openFile(secondFileId);
+
+		// Manually open the file and inspect its contents. We cannot simply
+		// ask the BufferCache to pin the page, because it would return the same
+		// physical memory again, and for performance reasons pages are never
+		// reset with 0's.
+		IIOManager ioManager = ctx.getIOManager();
+		FileReference testFileRef = new FileReference(new File(fileName));
+		FileHandle testFileHandle = new FileHandle(testFileRef);
+		testFileHandle.open(FileReadWriteMode.READ_ONLY,
+				FileSyncMode.METADATA_SYNC_DATA_SYNC);
+		ByteBuffer testBuffer = ByteBuffer.allocate(PAGE_SIZE);
+		ioManager.syncRead(testFileHandle, 0, testBuffer);
+		for (int i = 0; i < testBuffer.capacity(); i++) {
+			if (deleteFile) {
+				// We deleted the file. We expect to see a clean buffer.
+				if (testBuffer.get(i) == Byte.MAX_VALUE) {
+					fail("Page 0 of deleted file was lazily flushed in openFile(), "
+							+ "corrupting the data of a newly created file with the same name.");
+				}
+			} else {
+				// We didn't delete the file. We expect to see a buffer full of
+				// Byte.MAX_VALUE.
+				if (testBuffer.get(i) != Byte.MAX_VALUE) {
+					fail("Page 0 of closed file was not flushed properly when reclaiming the file slot of fileId 0 in the BufferCache.");
+				}
+			}
+		}
+		testFileHandle.close();
+		bufferCache.closeFile(secondFileId);
+		if (deleteFile) {
+			bufferCache.deleteFile(secondFileId, false);
+		}
+		bufferCache.close();
+	}
+
+	// Tests the behavior of the BufferCache when more pages are pinned than
+	// the cache can hold. We expect an exception.
+	@Test
+	public void testPinningAllPages() throws HyracksDataException {
+		int numPages = 10;
+		TestStorageManagerComponentHolder.init(PAGE_SIZE, numPages, 1);
+
+		IBufferCache bufferCache = TestStorageManagerComponentHolder
+				.getBufferCache(ctx);
+		IFileMapProvider fmp = TestStorageManagerComponentHolder
+				.getFileMapProvider(ctx);
+
+		FileReference firstFileRef = new FileReference(new File(fileName));
+		bufferCache.createFile(firstFileRef);
+		int fileId = fmp.lookupFileId(firstFileRef);
+		bufferCache.openFile(fileId);
+
+		// Pin all pages.
+		ICachedPage[] pages = new ICachedPage[numPages];
+		for (int i = 0; i < numPages; ++i) {
+			pages[i] = bufferCache.pin(
+					BufferedFileHandle.getDiskPageId(fileId, i), true);
+		}
+
+		// Try to pin another page. We expect a HyracksDataException.
+		ICachedPage errorPage = null;
+		try {
+			errorPage = bufferCache.pin(
+					BufferedFileHandle.getDiskPageId(fileId, numPages), true);
+		} catch (HyracksDataException e) {
+			// This is the expected outcome.
+			// The BufferCache should still be able to function properly.
+			// Try unpinning all pages.
+			for (int i = 0; i < numPages; ++i) {
+				bufferCache.unpin(pages[i]);
+			}
+			// Now try pinning the page that failed above again.
+			errorPage = bufferCache.pin(
+					BufferedFileHandle.getDiskPageId(fileId, numPages), true);
+			// Unpin it.
+			bufferCache.unpin(errorPage);
+			// Cleanup.
+			bufferCache.closeFile(fileId);
+			bufferCache.close();
+			return;
+		} catch (Exception e) {
+			fail("Expected a HyracksDataException when pinning more pages than available but got another exception: "
+					+ e.getMessage());
+		}
+		fail("Expected a HyracksDataException when pinning more pages than available.");
+	}
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/edu/uci/ics/hyracks/storage/common/BufferCacheTest.java b/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/edu/uci/ics/hyracks/storage/common/BufferCacheTest.java
index 680fd3b..80502eb 100644
--- a/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/edu/uci/ics/hyracks/storage/common/BufferCacheTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/edu/uci/ics/hyracks/storage/common/BufferCacheTest.java
@@ -13,7 +13,7 @@
 import org.junit.Assert;
 import org.junit.Test;
 
-import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
 import edu.uci.ics.hyracks.api.io.FileReference;
 import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
@@ -24,25 +24,25 @@
 import edu.uci.ics.hyracks.test.support.TestUtils;
 
 public class BufferCacheTest {
-    protected static final List<String> openedFiles = new ArrayList<String>();    
+    protected static final List<String> openedFiles = new ArrayList<String>();
     protected static final SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
     protected static final String tmpDir = System.getProperty("java.io.tmpdir");
-    protected static final String sep = System.getProperty("file.separator");    
-    
+    protected static final String sep = System.getProperty("file.separator");
+
     private static final int PAGE_SIZE = 256;
     private static final int NUM_PAGES = 10;
     private static final int MAX_OPEN_FILES = 20;
     private static final int HYRACKS_FRAME_SIZE = PAGE_SIZE;
-    private IHyracksStageletContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
-    
+    private IHyracksTaskContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
+
     private static final Random rnd = new Random(50);
-    
+
     private String getFileName() {
         String fileName = tmpDir + sep + simpleDateFormat.format(new Date()) + openedFiles.size();
         openedFiles.add(fileName);
         return fileName;
-    }          
-    
+    }
+
     @Test
     public void simpleOpenPinCloseTest() throws HyracksDataException {
         TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
@@ -54,85 +54,84 @@
         int fileId = fmp.lookupFileId(file);
         int num = 10;
         int testPageId = 0;
-        
+
         bufferCache.openFile(fileId);
-        
+
         ICachedPage page = null;
-        
+
         // tryPin should fail
         page = bufferCache.tryPin(BufferedFileHandle.getDiskPageId(fileId, testPageId));
         Assert.assertNull(page);
-        
+
         // pin page should succeed
         page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, testPageId), true);
         page.acquireWriteLatch();
-        try {            
-            for(int i = 0; i < num; i++) {
+        try {
+            for (int i = 0; i < num; i++) {
                 page.getBuffer().putInt(i * 4, i);
-            }               
-            
+            }
+
             // try pin should succeed         
             ICachedPage page2 = bufferCache.tryPin(BufferedFileHandle.getDiskPageId(fileId, testPageId));
             Assert.assertNotNull(page2);
             bufferCache.unpin(page2);
-            
+
         } finally {
             page.releaseWriteLatch();
             bufferCache.unpin(page);
-        }                        
-        
+        }
+
         bufferCache.closeFile(fileId);
-        
+
         boolean exceptionThrown = false;
-        
+
         // tryPin should fail since file is not open
         try {
             page = bufferCache.tryPin(BufferedFileHandle.getDiskPageId(fileId, testPageId));
-        } catch(HyracksDataException e) {
+        } catch (HyracksDataException e) {
             exceptionThrown = true;
         }
         Assert.assertTrue(exceptionThrown);
-        
+
         // pin should fail since file is not open
         exceptionThrown = false;
         try {
             page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, testPageId), false);
-        } catch(HyracksDataException e) {
+        } catch (HyracksDataException e) {
             exceptionThrown = true;
         }
         Assert.assertTrue(exceptionThrown);
-        
-        
+
         // open file again
         bufferCache.openFile(fileId);
-        
+
         // tryPin should succeed because page should still be cached        
-        page = bufferCache.tryPin(BufferedFileHandle.getDiskPageId(fileId, testPageId));        
+        page = bufferCache.tryPin(BufferedFileHandle.getDiskPageId(fileId, testPageId));
         Assert.assertNotNull(page);
         page.acquireReadLatch();
         try {
             // verify contents of page
-            for(int i = 0; i < num; i++) {
+            for (int i = 0; i < num; i++) {
                 Assert.assertEquals(page.getBuffer().getInt(i * 4), i);
             }
         } finally {
             page.releaseReadLatch();
             bufferCache.unpin(page);
-        }                
-        
-        bufferCache.closeFile(fileId);        
+        }
+
+        bufferCache.closeFile(fileId);
         bufferCache.close();
     }
-        
+
     @Test
     public void simpleMaxOpenFilesTest() throws HyracksDataException {
         TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
         IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
         IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
-        
+
         List<Integer> fileIds = new ArrayList<Integer>();
-        
-        for(int i = 0; i < MAX_OPEN_FILES; i++) {
+
+        for (int i = 0; i < MAX_OPEN_FILES; i++) {
             String fileName = getFileName();
             FileReference file = new FileReference(new File(fileName));
             bufferCache.createFile(file);
@@ -140,9 +139,9 @@
             bufferCache.openFile(fileId);
             fileIds.add(fileId);
         }
-        
+
         boolean exceptionThrown = false;
-        
+
         // since all files are open, next open should fail
         try {
             String fileName = getFileName();
@@ -150,16 +149,16 @@
             bufferCache.createFile(file);
             int fileId = fmp.lookupFileId(file);
             bufferCache.openFile(fileId);
-        } catch(HyracksDataException e) {
+        } catch (HyracksDataException e) {
             exceptionThrown = true;
         }
         Assert.assertTrue(exceptionThrown);
-        
+
         // close a random file
         int ix = Math.abs(rnd.nextInt()) % fileIds.size();
         bufferCache.closeFile(fileIds.get(ix));
         fileIds.remove(ix);
-        
+
         // now open should succeed again
         exceptionThrown = false;
         try {
@@ -169,58 +168,58 @@
             int fileId = fmp.lookupFileId(file);
             bufferCache.openFile(fileId);
             fileIds.add(fileId);
-            
-        } catch(HyracksDataException e) {
+
+        } catch (HyracksDataException e) {
             exceptionThrown = true;
         }
         Assert.assertFalse(exceptionThrown);
-        
-        for(Integer i : fileIds) {
+
+        for (Integer i : fileIds) {
             bufferCache.closeFile(i.intValue());
         }
-        
+
         bufferCache.close();
     }
-    
+
     @Test
     public void contentCheckingMaxOpenFilesTest() throws HyracksDataException {
         TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
         IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
         IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
-        
+
         List<Integer> fileIds = new ArrayList<Integer>();
-        Map<Integer, ArrayList<Integer>> pageContents = new HashMap<Integer, ArrayList<Integer>>();        
+        Map<Integer, ArrayList<Integer>> pageContents = new HashMap<Integer, ArrayList<Integer>>();
         int num = 10;
-        int testPageId = 0;        
-        
+        int testPageId = 0;
+
         // open max number of files and write some stuff into their first page
-        for(int i = 0; i < MAX_OPEN_FILES; i++) {
+        for (int i = 0; i < MAX_OPEN_FILES; i++) {
             String fileName = getFileName();
             FileReference file = new FileReference(new File(fileName));
             bufferCache.createFile(file);
             int fileId = fmp.lookupFileId(file);
             bufferCache.openFile(fileId);
             fileIds.add(fileId);
-            
-            ICachedPage page = null;            
+
+            ICachedPage page = null;
             page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, testPageId), true);
             page.acquireWriteLatch();
-            try {            
-                ArrayList<Integer> values = new ArrayList<Integer>();                
-                for(int j = 0; j < num; j++) {
+            try {
+                ArrayList<Integer> values = new ArrayList<Integer>();
+                for (int j = 0; j < num; j++) {
                     int x = Math.abs(rnd.nextInt());
                     page.getBuffer().putInt(j * 4, x);
                     values.add(x);
                 }
-                pageContents.put(fileId, values);                
+                pageContents.put(fileId, values);
             } finally {
                 page.releaseWriteLatch();
                 bufferCache.unpin(page);
-            }            
-        }        
-        
+            }
+        }
+
         boolean exceptionThrown = false;
-        
+
         // since all files are open, next open should fail
         try {
             String fileName = getFileName();
@@ -228,82 +227,82 @@
             bufferCache.createFile(file);
             int fileId = fmp.lookupFileId(file);
             bufferCache.openFile(fileId);
-        } catch(HyracksDataException e) {
+        } catch (HyracksDataException e) {
             exceptionThrown = true;
         }
-        Assert.assertTrue(exceptionThrown);        
-        
+        Assert.assertTrue(exceptionThrown);
+
         // close a few random files
         ArrayList<Integer> closedFileIds = new ArrayList<Integer>();
         int filesToClose = 5;
-        for(int i = 0; i < filesToClose; i++) {
-            int ix = Math.abs(rnd.nextInt()) % fileIds.size();            
+        for (int i = 0; i < filesToClose; i++) {
+            int ix = Math.abs(rnd.nextInt()) % fileIds.size();
             bufferCache.closeFile(fileIds.get(ix));
             closedFileIds.add(fileIds.get(ix));
             fileIds.remove(ix);
-        }        
-        
+        }
+
         // now open a few new files
-        for(int i = 0; i < filesToClose; i++) {
+        for (int i = 0; i < filesToClose; i++) {
             String fileName = getFileName();
             FileReference file = new FileReference(new File(fileName));
             bufferCache.createFile(file);
             int fileId = fmp.lookupFileId(file);
             bufferCache.openFile(fileId);
             fileIds.add(fileId);
-        }        
-        
+        }
+
         // since all files are open, next open should fail
         try {
             String fileName = getFileName();
             FileReference file = new FileReference(new File(fileName));
             bufferCache.createFile(file);
             int fileId = fmp.lookupFileId(file);
-            bufferCache.openFile(fileId);            
-        } catch(HyracksDataException e) {
+            bufferCache.openFile(fileId);
+        } catch (HyracksDataException e) {
             exceptionThrown = true;
         }
-        Assert.assertTrue(exceptionThrown);       
-        
+        Assert.assertTrue(exceptionThrown);
+
         // close a few random files again        
-        for(int i = 0; i < filesToClose; i++) {
-            int ix = Math.abs(rnd.nextInt()) % fileIds.size();            
+        for (int i = 0; i < filesToClose; i++) {
+            int ix = Math.abs(rnd.nextInt()) % fileIds.size();
             bufferCache.closeFile(fileIds.get(ix));
             closedFileIds.add(fileIds.get(ix));
             fileIds.remove(ix);
         }
-        
+
         // now open those closed files again and verify their contents
-        for(int i = 0; i < filesToClose; i++) {
+        for (int i = 0; i < filesToClose; i++) {
             int closedFileId = closedFileIds.get(i);
             bufferCache.openFile(closedFileId);
             fileIds.add(closedFileId);
-            
+
             // pin first page and verify contents
-            ICachedPage page = null;            
+            ICachedPage page = null;
             page = bufferCache.pin(BufferedFileHandle.getDiskPageId(closedFileId, testPageId), false);
             page.acquireReadLatch();
-            try {            
-                ArrayList<Integer> values = pageContents.get(closedFileId);                
-                for(int j = 0; j < values.size(); j++) {
-                    Assert.assertEquals(values.get(j).intValue(), page.getBuffer().getInt(j * 4));                    
-                }      
+            try {
+                ArrayList<Integer> values = pageContents.get(closedFileId);
+                for (int j = 0; j < values.size(); j++) {
+                    Assert.assertEquals(values.get(j).intValue(), page.getBuffer().getInt(j * 4));
+                }
             } finally {
                 page.releaseReadLatch();
                 bufferCache.unpin(page);
-            }            
-        }         
-        
-        for(Integer i : fileIds) {
+            }
+        }
+
+        for (Integer i : fileIds) {
             bufferCache.closeFile(i.intValue());
         }
-        
+
         bufferCache.close();
     }
-    
+
     @AfterClass
     public static void cleanup() throws Exception {
-        for(String s : openedFiles) {
+        for (String s : openedFiles) {
             File f = new File(s);
             f.deleteOnExit();
         }
diff --git a/hyracks/hyracks-tests/pom.xml b/hyracks/hyracks-tests/pom.xml
index 61c1fc0..b120ba82 100644
--- a/hyracks/hyracks-tests/pom.xml
+++ b/hyracks/hyracks-tests/pom.xml
@@ -2,13 +2,13 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>edu.uci.ics.hyracks</groupId>
   <artifactId>hyracks-tests</artifactId>
-  <version>0.1.9-SNAPSHOT</version>
+  <version>0.2.0-SNAPSHOT</version>
   <packaging>pom</packaging>
 
   <parent>
     <groupId>edu.uci.ics.hyracks</groupId>
     <artifactId>hyracks</artifactId>
-    <version>0.1.9-SNAPSHOT</version>
+    <version>0.2.0-SNAPSHOT</version>
   </parent>
 
   <modules>