Merged branches/hyracks_storage_cleanup@267 into trunk/hyracks@267
git-svn-id: https://hyracks.googlecode.com/svn/trunk@268 123451ca-8445-de46-9d55-352943316053
diff --git a/hyracks/hyracks-examples/btree-example/btreeclient/.settings/org.eclipse.jdt.core.prefs b/hyracks/hyracks-examples/btree-example/btreeclient/.settings/org.eclipse.jdt.core.prefs
index 367ddc4..7b596e8 100644
--- a/hyracks/hyracks-examples/btree-example/btreeclient/.settings/org.eclipse.jdt.core.prefs
+++ b/hyracks/hyracks-examples/btree-example/btreeclient/.settings/org.eclipse.jdt.core.prefs
@@ -1,6 +1,264 @@
-#Wed Oct 06 08:06:49 PDT 2010
+#Mon Dec 20 19:05:17 PST 2010
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.compliance=1.6
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
org.eclipse.jdt.core.compiler.source=1.6
+org.eclipse.jdt.core.formatter.align_type_members_on_columns=false
+org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression=16
+org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant=48
+org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call=16
+org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation=16
+org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression=16
+org.eclipse.jdt.core.formatter.alignment_for_assignment=0
+org.eclipse.jdt.core.formatter.alignment_for_binary_expression=16
+org.eclipse.jdt.core.formatter.alignment_for_compact_if=16
+org.eclipse.jdt.core.formatter.alignment_for_conditional_expression=80
+org.eclipse.jdt.core.formatter.alignment_for_enum_constants=48
+org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer=16
+org.eclipse.jdt.core.formatter.alignment_for_multiple_fields=16
+org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation=16
+org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration=16
+org.eclipse.jdt.core.formatter.blank_lines_after_imports=1
+org.eclipse.jdt.core.formatter.blank_lines_after_package=1
+org.eclipse.jdt.core.formatter.blank_lines_before_field=0
+org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration=0
+org.eclipse.jdt.core.formatter.blank_lines_before_imports=1
+org.eclipse.jdt.core.formatter.blank_lines_before_member_type=1
+org.eclipse.jdt.core.formatter.blank_lines_before_method=1
+org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk=1
+org.eclipse.jdt.core.formatter.blank_lines_before_package=0
+org.eclipse.jdt.core.formatter.blank_lines_between_import_groups=1
+org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations=1
+org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_array_initializer=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_block=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_block_in_case=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_enum_constant=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_method_declaration=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_switch=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_type_declaration=end_of_line
+org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment=false
+org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment=false
+org.eclipse.jdt.core.formatter.comment.format_block_comments=true
+org.eclipse.jdt.core.formatter.comment.format_header=false
+org.eclipse.jdt.core.formatter.comment.format_html=true
+org.eclipse.jdt.core.formatter.comment.format_javadoc_comments=true
+org.eclipse.jdt.core.formatter.comment.format_line_comments=true
+org.eclipse.jdt.core.formatter.comment.format_source_code=true
+org.eclipse.jdt.core.formatter.comment.indent_parameter_description=true
+org.eclipse.jdt.core.formatter.comment.indent_root_tags=true
+org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags=insert
+org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter=insert
+org.eclipse.jdt.core.formatter.comment.line_length=80
+org.eclipse.jdt.core.formatter.compact_else_if=true
+org.eclipse.jdt.core.formatter.continuation_indentation=2
+org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer=2
+org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line=false
+org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header=true
+org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header=true
+org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header=true
+org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header=true
+org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases=true
+org.eclipse.jdt.core.formatter.indent_empty_lines=false
+org.eclipse.jdt.core.formatter.indent_statements_compare_to_block=true
+org.eclipse.jdt.core.formatter.indent_statements_compare_to_body=true
+org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases=true
+org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch=true
+org.eclipse.jdt.core.formatter.indentation.size=4
+org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable=insert
+org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_member=insert
+org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration=insert
+org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration=insert
+org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block=insert
+org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant=insert
+org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration=insert
+org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body=insert
+org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter=insert
+org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator=insert
+org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_binary_operator=insert
+org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments=insert
+org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters=insert
+org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block=insert
+org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast=insert
+org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert=insert
+org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case=insert
+org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional=insert
+org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for=insert
+org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters=insert
+org.eclipse.jdt.core.formatter.insert_space_after_ellipsis=insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer=insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional=insert
+org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for=insert
+org.eclipse.jdt.core.formatter.insert_space_after_unary_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter=insert
+org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator=insert
+org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_binary_operator=insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer=insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert=insert
+org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional=insert
+org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for=insert
+org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_ellipsis=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while=insert
+org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return=insert
+org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw=insert
+org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional=insert
+org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_semicolon=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_unary_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation=do not insert
+org.eclipse.jdt.core.formatter.join_lines_in_comments=true
+org.eclipse.jdt.core.formatter.join_wrapped_lines=true
+org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line=false
+org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line=false
+org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line=false
+org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line=false
+org.eclipse.jdt.core.formatter.lineSplit=120
+org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column=false
+org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column=false
+org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body=0
+org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve=1
+org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line=true
+org.eclipse.jdt.core.formatter.tabulation.char=space
+org.eclipse.jdt.core.formatter.tabulation.size=4
+org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations=false
+org.eclipse.jdt.core.formatter.wrap_before_binary_operator=true
diff --git a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/InsertPipelineExample.java b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/InsertPipelineExample.java
index 80cc608..f293a1f 100644
--- a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/InsertPipelineExample.java
+++ b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/InsertPipelineExample.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.examples.btree.client;
import java.util.UUID;
@@ -30,24 +45,22 @@
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
import edu.uci.ics.hyracks.dataflow.std.misc.NullSinkOperatorDescriptor;
import edu.uci.ics.hyracks.examples.btree.helper.BTreeRegistryProvider;
-import edu.uci.ics.hyracks.examples.btree.helper.BufferCacheProvider;
import edu.uci.ics.hyracks.examples.btree.helper.DataGenOperatorDescriptor;
-import edu.uci.ics.hyracks.examples.btree.helper.FileMappingProviderProvider;
+import edu.uci.ics.hyracks.examples.btree.helper.SimpleStorageManager;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeInsertUpdateDeleteOperatorDescriptor;
import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBTreeRegistryProvider;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBufferCacheProvider;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.IFileMappingProviderProvider;
import edu.uci.ics.hyracks.storage.am.btree.frames.NSMInteriorFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.frames.NSMLeafFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOp;
import edu.uci.ics.hyracks.storage.am.btree.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
// This example will insert tuples into the primary and secondary index using an insert pipeline
public class InsertPipelineExample {
- private static class Options {
+ private static class Options {
@Option(name = "-host", usage = "Hyracks Cluster Controller Host name", required = true)
public String host;
@@ -56,18 +69,18 @@
@Option(name = "-app", usage = "Hyracks Application name", required = true)
public String app;
-
+
@Option(name = "-target-ncs", usage = "Comma separated list of node-controller names to use", required = true)
public String ncs;
-
+
@Option(name = "-num-tuples", usage = "Total number of tuples to to be generated for insertion", required = true)
public int numTuples;
-
+
@Option(name = "-primary-btreename", usage = "B-Tree file name of primary index", required = true)
public String primaryBTreeName;
-
+
@Option(name = "-secondary-btreename", usage = "B-Tree file name of secondary index", required = true)
- public String secondaryBTreeName;
+ public String secondaryBTreeName;
}
public static void main(String[] args) throws Exception {
@@ -78,7 +91,7 @@
IHyracksClientConnection hcc = new HyracksRMIConnection(options.host, options.port);
JobSpecification job = createJob(options);
-
+
long start = System.currentTimeMillis();
UUID jobId = hcc.createJob(options.app, job);
hcc.start(jobId);
@@ -86,33 +99,42 @@
long end = System.currentTimeMillis();
System.err.println(start + " " + end + " " + (end - start));
}
-
- private static JobSpecification createJob(Options options) {
-
- JobSpecification spec = new JobSpecification();
- String[] splitNCs = options.ncs.split(",");
-
- // schema of tuples to be generated: 4 fields with int, string, string, string
- // we will use field 2 as primary key to fill a clustered index
- RecordDescriptor recDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- UTF8StringSerializerDeserializer.INSTANCE, // this field will not go into B-Tree
- UTF8StringSerializerDeserializer.INSTANCE, // we will use this as payload
- IntegerSerializerDeserializer.INSTANCE, // we will use this field as key
- IntegerSerializerDeserializer.INSTANCE, // we will use this as payload
- UTF8StringSerializerDeserializer.INSTANCE // we will use this as payload
+ private static JobSpecification createJob(Options options) {
+
+ JobSpecification spec = new JobSpecification();
+
+ String[] splitNCs = options.ncs.split(",");
+
+ // schema of tuples to be generated: 4 fields with int, string, string,
+ // string
+ // we will use field 2 as primary key to fill a clustered index
+ RecordDescriptor recDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+ UTF8StringSerializerDeserializer.INSTANCE, // this field will
+ // not go into B-Tree
+ UTF8StringSerializerDeserializer.INSTANCE, // we will use this
+ // as payload
+ IntegerSerializerDeserializer.INSTANCE, // we will use this
+ // field as key
+ IntegerSerializerDeserializer.INSTANCE, // we will use this as
+ // payload
+ UTF8StringSerializerDeserializer.INSTANCE // we will use this as
+ // payload
});
-
- // generate numRecords records with field 2 being unique, integer values in [0, 100000], and strings with max length of 10 characters, and random seed 100
- DataGenOperatorDescriptor dataGen = new DataGenOperatorDescriptor(spec, recDesc, options.numTuples, 2, 0, 100000, 10, 100);
+
+ // generate numRecords records with field 2 being unique, integer values
+ // in [0, 100000], and strings with max length of 10 characters, and
+ // random seed 100
+ DataGenOperatorDescriptor dataGen = new DataGenOperatorDescriptor(spec, recDesc, options.numTuples, 2, 0,
+ 100000, 10, 100);
// run data generator on first nodecontroller given
- PartitionConstraint dataGenConstraint = new ExplicitPartitionConstraint(new LocationConstraint[] { new AbsoluteLocationConstraint(splitNCs[0]) });
+ PartitionConstraint dataGenConstraint = new ExplicitPartitionConstraint(
+ new LocationConstraint[] { new AbsoluteLocationConstraint(splitNCs[0]) });
dataGen.setPartitionConstraint(dataGenConstraint);
-
- IBufferCacheProvider bufferCacheProvider = BufferCacheProvider.INSTANCE;
+
IBTreeRegistryProvider btreeRegistryProvider = BTreeRegistryProvider.INSTANCE;
- IFileMappingProviderProvider fileMappingProviderProvider = FileMappingProviderProvider.INSTANCE;
-
+ IStorageManagerInterface storageManager = SimpleStorageManager.INSTANCE;
+
// prepare insertion into primary index
// tuples to be put into B-Tree shall have 4 fields
int primaryFieldCount = 4;
@@ -121,68 +143,82 @@
primaryTypeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
primaryTypeTraits[2] = new TypeTrait(4);
primaryTypeTraits[3] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
-
+
// create factories and providers for secondary B-Tree
TypeAwareTupleWriterFactory primaryTupleWriterFactory = new TypeAwareTupleWriterFactory(primaryTypeTraits);
IBTreeInteriorFrameFactory primaryInteriorFrameFactory = new NSMInteriorFrameFactory(primaryTupleWriterFactory);
IBTreeLeafFrameFactory primaryLeafFrameFactory = new NSMLeafFrameFactory(primaryTupleWriterFactory);
-
- // the B-Tree expects its keyfields to be at the front of its input tuple
- int[] primaryFieldPermutation = { 2, 1, 3, 4 }; // map field 2 of input tuple to field 0 of B-Tree tuple, etc.
+
+ // the B-Tree expects its keyfields to be at the front of its input
+ // tuple
+ int[] primaryFieldPermutation = { 2, 1, 3, 4 }; // map field 2 of input
+ // tuple to field 0 of
+ // B-Tree tuple, etc.
// comparator factories for primary index
IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[1];
- primaryComparatorFactories[0] = IntegerBinaryComparatorFactory.INSTANCE;
- IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
- // create operator descriptor
- BTreeInsertUpdateDeleteOperatorDescriptor primaryInsert = new BTreeInsertUpdateDeleteOperatorDescriptor(spec, recDesc, bufferCacheProvider, btreeRegistryProvider, primarySplitProvider, fileMappingProviderProvider, primaryInteriorFrameFactory, primaryLeafFrameFactory, primaryTypeTraits, primaryComparatorFactories, primaryFieldPermutation, BTreeOp.BTO_INSERT);
+ primaryComparatorFactories[0] = IntegerBinaryComparatorFactory.INSTANCE;
+ IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
+
+ // create operator descriptor
+ BTreeInsertUpdateDeleteOperatorDescriptor primaryInsert = new BTreeInsertUpdateDeleteOperatorDescriptor(spec,
+ recDesc, storageManager, btreeRegistryProvider, primarySplitProvider, primaryInteriorFrameFactory,
+ primaryLeafFrameFactory, primaryTypeTraits, primaryComparatorFactories, primaryFieldPermutation,
+ BTreeOp.BTO_INSERT);
PartitionConstraint primaryInsertConstraint = JobHelper.createPartitionConstraint(splitNCs);
primaryInsert.setPartitionConstraint(primaryInsertConstraint);
-
+
// prepare insertion into secondary index
// tuples to be put into B-Tree shall have 2 fields
- int secondaryFieldCount = 2;
+ int secondaryFieldCount = 2;
ITypeTrait[] secondaryTypeTraits = new ITypeTrait[secondaryFieldCount];
secondaryTypeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
- secondaryTypeTraits[1] = new TypeTrait(4);
-
+ secondaryTypeTraits[1] = new TypeTrait(4);
+
// create factories and providers for secondary B-Tree
TypeAwareTupleWriterFactory secondaryTupleWriterFactory = new TypeAwareTupleWriterFactory(secondaryTypeTraits);
- IBTreeInteriorFrameFactory secondaryInteriorFrameFactory = new NSMInteriorFrameFactory(secondaryTupleWriterFactory);
+ IBTreeInteriorFrameFactory secondaryInteriorFrameFactory = new NSMInteriorFrameFactory(
+ secondaryTupleWriterFactory);
IBTreeLeafFrameFactory secondaryLeafFrameFactory = new NSMLeafFrameFactory(secondaryTupleWriterFactory);
-
- // the B-Tree expects its keyfields to be at the front of its input tuple
+
+ // the B-Tree expects its keyfields to be at the front of its input
+ // tuple
int[] secondaryFieldPermutation = { 1, 2 };
        // comparator factories for secondary index
IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[2];
- secondaryComparatorFactories[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
- secondaryComparatorFactories[1] = IntegerBinaryComparatorFactory.INSTANCE;
- IFileSplitProvider secondarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.secondaryBTreeName);
- // create operator descriptor
- BTreeInsertUpdateDeleteOperatorDescriptor secondaryInsert = new BTreeInsertUpdateDeleteOperatorDescriptor(spec, recDesc, bufferCacheProvider, btreeRegistryProvider, secondarySplitProvider, fileMappingProviderProvider, secondaryInteriorFrameFactory, secondaryLeafFrameFactory, secondaryTypeTraits, secondaryComparatorFactories, secondaryFieldPermutation, BTreeOp.BTO_INSERT);
+ secondaryComparatorFactories[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
+ secondaryComparatorFactories[1] = IntegerBinaryComparatorFactory.INSTANCE;
+ IFileSplitProvider secondarySplitProvider = JobHelper.createFileSplitProvider(splitNCs,
+ options.secondaryBTreeName);
+ // create operator descriptor
+ BTreeInsertUpdateDeleteOperatorDescriptor secondaryInsert = new BTreeInsertUpdateDeleteOperatorDescriptor(spec,
+ recDesc, storageManager, btreeRegistryProvider, secondarySplitProvider, secondaryInteriorFrameFactory,
+ secondaryLeafFrameFactory, secondaryTypeTraits, secondaryComparatorFactories,
+ secondaryFieldPermutation, BTreeOp.BTO_INSERT);
PartitionConstraint secondaryInsertConstraint = JobHelper.createPartitionConstraint(splitNCs);
secondaryInsert.setPartitionConstraint(secondaryInsertConstraint);
-
+
// end the insert pipeline at this sink operator
NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
PartitionConstraint nullSinkPartitionConstraint = JobHelper.createPartitionConstraint(splitNCs);
nullSink.setPartitionConstraint(nullSinkPartitionConstraint);
-
- // distribute the records from the datagen via hashing to the bulk load ops
+
+ // distribute the records from the datagen via hashing to the bulk load
+ // ops
IBinaryHashFunctionFactory[] hashFactories = new IBinaryHashFunctionFactory[1];
- hashFactories[0] = UTF8StringBinaryHashFunctionFactory.INSTANCE;
+ hashFactories[0] = UTF8StringBinaryHashFunctionFactory.INSTANCE;
IConnectorDescriptor hashConn = new MToNHashPartitioningConnectorDescriptor(spec,
new FieldHashPartitionComputerFactory(new int[] { 0 }, hashFactories));
-
+
// connect the ops
-
+
spec.connect(hashConn, dataGen, 0, primaryInsert, 0);
-
+
spec.connect(new OneToOneConnectorDescriptor(spec), primaryInsert, 0, secondaryInsert, 0);
-
+
spec.connect(new OneToOneConnectorDescriptor(spec), secondaryInsert, 0, nullSink, 0);
-
+
spec.addRoot(nullSink);
-
- return spec;
- }
+
+ return spec;
+ }
}
diff --git a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/JobHelper.java b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/JobHelper.java
index 6799128..7702aa5 100644
--- a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/JobHelper.java
+++ b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/JobHelper.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.examples.btree.client;
import java.io.File;
@@ -11,18 +26,18 @@
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
public class JobHelper {
- public static IFileSplitProvider createFileSplitProvider(String[] splitNCs, String btreeFileName) {
- FileSplit[] fileSplits = new FileSplit[splitNCs.length];
- for (int i = 0; i < splitNCs.length; ++i) {
- String fileName = btreeFileName + "." + splitNCs[i];
+ public static IFileSplitProvider createFileSplitProvider(String[] splitNCs, String btreeFileName) {
+ FileSplit[] fileSplits = new FileSplit[splitNCs.length];
+ for (int i = 0; i < splitNCs.length; ++i) {
+ String fileName = btreeFileName + "." + splitNCs[i];
fileSplits[i] = new FileSplit(splitNCs[i], new File(fileName));
- }
- IFileSplitProvider splitProvider = new ConstantFileSplitProvider(fileSplits);
- return splitProvider;
+ }
+ IFileSplitProvider splitProvider = new ConstantFileSplitProvider(fileSplits);
+ return splitProvider;
}
-
- public static PartitionConstraint createPartitionConstraint(String[] splitNCs) {
- LocationConstraint[] lConstraints = new LocationConstraint[splitNCs.length];
+
+ public static PartitionConstraint createPartitionConstraint(String[] splitNCs) {
+ LocationConstraint[] lConstraints = new LocationConstraint[splitNCs.length];
for (int i = 0; i < splitNCs.length; ++i) {
lConstraints[i] = new AbsoluteLocationConstraint(splitNCs[i]);
}
diff --git a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
index 8a8f60e..1fc1604 100644
--- a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
+++ b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
@@ -43,18 +43,16 @@
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
import edu.uci.ics.hyracks.examples.btree.helper.BTreeRegistryProvider;
-import edu.uci.ics.hyracks.examples.btree.helper.BufferCacheProvider;
import edu.uci.ics.hyracks.examples.btree.helper.DataGenOperatorDescriptor;
-import edu.uci.ics.hyracks.examples.btree.helper.FileMappingProviderProvider;
+import edu.uci.ics.hyracks.examples.btree.helper.SimpleStorageManager;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeBulkLoadOperatorDescriptor;
import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBTreeRegistryProvider;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBufferCacheProvider;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.IFileMappingProviderProvider;
import edu.uci.ics.hyracks.storage.am.btree.frames.NSMInteriorFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.frames.NSMLeafFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
// This example will load a primary index from randomly generated data
@@ -65,19 +63,19 @@
@Option(name = "-port", usage = "Hyracks Cluster Controller Port (default: 1099)")
public int port = 1099;
-
+
@Option(name = "-app", usage = "Hyracks Application name", required = true)
public String app;
-
+
@Option(name = "-target-ncs", usage = "Comma separated list of node-controller names to use", required = true)
public String ncs;
-
+
@Option(name = "-num-tuples", usage = "Total number of tuples to to be generated for loading", required = true)
public int numTuples;
-
+
@Option(name = "-btreename", usage = "B-Tree file name", required = true)
public String btreeName;
-
+
@Option(name = "-sortbuffer-size", usage = "Sort buffer size in frames (default: 32768)", required = false)
public int sbSize = 32768;
}
@@ -98,39 +96,50 @@
long end = System.currentTimeMillis();
System.err.println(start + " " + end + " " + (end - start));
}
-
- private static JobSpecification createJob(Options options) {
-
- JobSpecification spec = new JobSpecification();
- String[] splitNCs = options.ncs.split(",");
-
- // schema of tuples to be generated: 5 fields with string, string, int, int, string
- // we will use field-index 2 as primary key to fill a clustered index
- RecordDescriptor recDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- UTF8StringSerializerDeserializer.INSTANCE, // this field will not go into B-Tree
- UTF8StringSerializerDeserializer.INSTANCE, // we will use this as payload
- IntegerSerializerDeserializer.INSTANCE, // we will use this field as key
- IntegerSerializerDeserializer.INSTANCE, // we will use this as payload
- UTF8StringSerializerDeserializer.INSTANCE // we will use this as payload
+ private static JobSpecification createJob(Options options) {
+
+ JobSpecification spec = new JobSpecification();
+
+ String[] splitNCs = options.ncs.split(",");
+
+ // schema of tuples to be generated: 5 fields with string, string, int,
+ // int, string
+ // we will use field-index 2 as primary key to fill a clustered index
+ RecordDescriptor recDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+ UTF8StringSerializerDeserializer.INSTANCE, // this field will
+ // not go into B-Tree
+ UTF8StringSerializerDeserializer.INSTANCE, // we will use this
+ // as payload
+ IntegerSerializerDeserializer.INSTANCE, // we will use this
+ // field as key
+ IntegerSerializerDeserializer.INSTANCE, // we will use this as
+ // payload
+ UTF8StringSerializerDeserializer.INSTANCE // we will use this as
+ // payload
});
-
- // generate numRecords records with field 2 being unique, integer values in [0, 100000], and strings with max length of 10 characters, and random seed 50
- DataGenOperatorDescriptor dataGen = new DataGenOperatorDescriptor(spec, recDesc, options.numTuples, 2, 0, 100000, 10, 50);
+
+ // generate numRecords records with field 2 being unique, integer values
+ // in [0, 100000], and strings with max length of 10 characters, and
+ // random seed 50
+ DataGenOperatorDescriptor dataGen = new DataGenOperatorDescriptor(spec, recDesc, options.numTuples, 2, 0,
+ 100000, 10, 50);
// run data generator on first nodecontroller given
- PartitionConstraint dataGenConstraint = new ExplicitPartitionConstraint(new LocationConstraint[] { new AbsoluteLocationConstraint(splitNCs[0]) });
+ PartitionConstraint dataGenConstraint = new ExplicitPartitionConstraint(
+ new LocationConstraint[] { new AbsoluteLocationConstraint(splitNCs[0]) });
dataGen.setPartitionConstraint(dataGenConstraint);
-
+
// sort the tuples as preparation for bulk load
// fields to sort on
int[] sortFields = { 2 };
// comparators for sort fields
IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
- comparatorFactories[0] = IntegerBinaryComparatorFactory.INSTANCE;
- ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, options.sbSize, sortFields, comparatorFactories, recDesc);
+ comparatorFactories[0] = IntegerBinaryComparatorFactory.INSTANCE;
+ ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, options.sbSize, sortFields,
+ comparatorFactories, recDesc);
PartitionConstraint sorterConstraint = JobHelper.createPartitionConstraint(splitNCs);
sorter.setPartitionConstraint(sorterConstraint);
-
+
// tuples to be put into B-Tree shall have 4 fields
int fieldCount = 4;
ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
@@ -138,36 +147,39 @@
typeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
typeTraits[2] = new TypeTrait(4);
typeTraits[3] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
-
+
// create factories and providers for B-Tree
TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
- IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
- IBufferCacheProvider bufferCacheProvider = BufferCacheProvider.INSTANCE;
+ IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
IBTreeRegistryProvider btreeRegistryProvider = BTreeRegistryProvider.INSTANCE;
- IFileMappingProviderProvider fileMappingProviderProvider = FileMappingProviderProvider.INSTANCE;
-
- // the B-Tree expects its keyfields to be at the front of its input tuple
- int[] fieldPermutation = { 2, 1, 3, 4 }; // map field 2 of input tuple to field 0 of B-Tree tuple, etc.
+ IStorageManagerInterface storageManager = SimpleStorageManager.INSTANCE;
+
+ // the B-Tree expects its keyfields to be at the front of its input
+ // tuple
+ int[] fieldPermutation = { 2, 1, 3, 4 }; // map field 2 of input tuple
+ // to field 0 of B-Tree tuple,
+ // etc.
IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.btreeName);
- BTreeBulkLoadOperatorDescriptor btreeBulkLoad = new BTreeBulkLoadOperatorDescriptor(spec,
- bufferCacheProvider, btreeRegistryProvider, btreeSplitProvider, fileMappingProviderProvider, interiorFrameFactory,
- leafFrameFactory, typeTraits, comparatorFactories, fieldPermutation, 0.7f);
+ BTreeBulkLoadOperatorDescriptor btreeBulkLoad = new BTreeBulkLoadOperatorDescriptor(spec, storageManager,
+ btreeRegistryProvider, btreeSplitProvider, interiorFrameFactory, leafFrameFactory, typeTraits,
+ comparatorFactories, fieldPermutation, 0.7f);
PartitionConstraint bulkLoadConstraint = JobHelper.createPartitionConstraint(splitNCs);
btreeBulkLoad.setPartitionConstraint(bulkLoadConstraint);
-
- // distribute the records from the datagen via hashing to the bulk load ops
+
+ // distribute the records from the datagen via hashing to the bulk load
+ // ops
IBinaryHashFunctionFactory[] hashFactories = new IBinaryHashFunctionFactory[1];
- hashFactories[0] = UTF8StringBinaryHashFunctionFactory.INSTANCE;
+ hashFactories[0] = UTF8StringBinaryHashFunctionFactory.INSTANCE;
IConnectorDescriptor hashConn = new MToNHashPartitioningConnectorDescriptor(spec,
new FieldHashPartitionComputerFactory(new int[] { 0 }, hashFactories));
-
+
spec.connect(hashConn, dataGen, 0, sorter, 0);
-
+
spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, btreeBulkLoad, 0);
-
+
spec.addRoot(btreeBulkLoad);
-
- return spec;
- }
+
+ return spec;
+ }
}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexEnlistFilesExample.java b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexEnlistFilesExample.java
index 1bed3bf..9478c78 100644
--- a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexEnlistFilesExample.java
+++ b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexEnlistFilesExample.java
@@ -33,17 +33,15 @@
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
import edu.uci.ics.hyracks.examples.btree.helper.BTreeRegistryProvider;
-import edu.uci.ics.hyracks.examples.btree.helper.BufferCacheProvider;
-import edu.uci.ics.hyracks.examples.btree.helper.FileMappingProviderProvider;
+import edu.uci.ics.hyracks.examples.btree.helper.SimpleStorageManager;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeFileEnlistmentOperatorDescriptor;
import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBTreeRegistryProvider;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBufferCacheProvider;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.IFileMappingProviderProvider;
import edu.uci.ics.hyracks.storage.am.btree.frames.NSMInteriorFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.frames.NSMLeafFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
// This example will enlist existing files as primary index
@@ -54,15 +52,15 @@
@Option(name = "-port", usage = "Hyracks Cluster Controller Port (default: 1099)")
public int port = 1099;
-
+
@Option(name = "-app", usage = "Hyracks Application name", required = true)
public String app;
-
+
@Option(name = "-target-ncs", usage = "Comma separated list of node-controller names to use", required = true)
public String ncs;
-
+
@Option(name = "-btreename", usage = "B-Tree file name", required = true)
- public String btreeName;
+ public String btreeName;
}
public static void main(String[] args) throws Exception {
@@ -81,46 +79,44 @@
long end = System.currentTimeMillis();
System.err.println(start + " " + end + " " + (end - start));
}
-
- private static JobSpecification createJob(Options options) {
-
- JobSpecification spec = new JobSpecification();
- String[] splitNCs = options.ncs.split(",");
-
- // schema of tuples in existing files (see PrimaryIndexBulkLoadExample)
- RecordDescriptor recDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- IntegerSerializerDeserializer.INSTANCE,
- UTF8StringSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE,
- UTF8StringSerializerDeserializer.INSTANCE
- });
-
+ private static JobSpecification createJob(Options options) {
+
+ JobSpecification spec = new JobSpecification();
+
+ String[] splitNCs = options.ncs.split(",");
+
+ // schema of tuples in existing files (see PrimaryIndexBulkLoadExample)
+ RecordDescriptor recDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+ IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+
int fieldCount = 4;
ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
typeTraits[0] = new TypeTrait(4);
typeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
typeTraits[2] = new TypeTrait(4);
typeTraits[3] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
-
+
// create factories and providers for B-Tree
TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
- IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
- IBufferCacheProvider bufferCacheProvider = BufferCacheProvider.INSTANCE;
+ IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
IBTreeRegistryProvider btreeRegistryProvider = BTreeRegistryProvider.INSTANCE;
- IFileMappingProviderProvider fileMappingProviderProvider = FileMappingProviderProvider.INSTANCE;
-
+ IStorageManagerInterface storageManager = SimpleStorageManager.INSTANCE;
+
IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
comparatorFactories[0] = IntegerBinaryComparatorFactory.INSTANCE;
-
+
IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.btreeName);
- BTreeFileEnlistmentOperatorDescriptor fileEnlistmentOp = new BTreeFileEnlistmentOperatorDescriptor(spec, recDesc, bufferCacheProvider, btreeRegistryProvider, btreeSplitProvider, fileMappingProviderProvider, interiorFrameFactory, leafFrameFactory, typeTraits, comparatorFactories);
+ BTreeFileEnlistmentOperatorDescriptor fileEnlistmentOp = new BTreeFileEnlistmentOperatorDescriptor(spec,
+ recDesc, storageManager, btreeRegistryProvider, btreeSplitProvider, interiorFrameFactory,
+ leafFrameFactory, typeTraits, comparatorFactories);
PartitionConstraint fileEnlistmentConstraint = JobHelper.createPartitionConstraint(splitNCs);
- fileEnlistmentOp.setPartitionConstraint(fileEnlistmentConstraint);
-
+ fileEnlistmentOp.setPartitionConstraint(fileEnlistmentConstraint);
+
spec.addRoot(fileEnlistmentOp);
-
- return spec;
- }
+
+ return spec;
+ }
}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexSearchExample.java b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
index 8e444a4..06b5bc9 100644
--- a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
+++ b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
@@ -38,18 +38,16 @@
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
import edu.uci.ics.hyracks.dataflow.std.misc.PrinterOperatorDescriptor;
import edu.uci.ics.hyracks.examples.btree.helper.BTreeRegistryProvider;
-import edu.uci.ics.hyracks.examples.btree.helper.BufferCacheProvider;
-import edu.uci.ics.hyracks.examples.btree.helper.FileMappingProviderProvider;
+import edu.uci.ics.hyracks.examples.btree.helper.SimpleStorageManager;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
import edu.uci.ics.hyracks.storage.am.btree.dataflow.ConstantTupleSourceOperatorDescriptor;
import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBTreeRegistryProvider;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBufferCacheProvider;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.IFileMappingProviderProvider;
import edu.uci.ics.hyracks.storage.am.btree.frames.NSMInteriorFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.frames.NSMLeafFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
// This example will perform an ordered scan on the primary index
// i.e. a range-search for [-infinity, +infinity]
@@ -64,19 +62,19 @@
@Option(name = "-app", usage = "Hyracks Application name", required = true)
public String app;
-
+
@Option(name = "-target-ncs", usage = "Comma separated list of node-controller names to use", required = true)
public String ncs;
-
+
@Option(name = "-btreename", usage = "B-Tree file name to search", required = true)
- public String btreeName;
+ public String btreeName;
}
public static void main(String[] args) throws Exception {
Options options = new Options();
CmdLineParser parser = new CmdLineParser(options);
parser.parseArgument(args);
-
+
IHyracksClientConnection hcc = new HyracksRMIConnection(options.host, options.port);
JobSpecification job = createJob(options);
@@ -88,76 +86,83 @@
long end = System.currentTimeMillis();
System.err.println(start + " " + end + " " + (end - start));
}
-
- private static JobSpecification createJob(Options options) throws HyracksDataException {
-
- JobSpecification spec = new JobSpecification();
- String[] splitNCs = options.ncs.split(",");
-
- int fieldCount = 4;
+ private static JobSpecification createJob(Options options) throws HyracksDataException {
+
+ JobSpecification spec = new JobSpecification();
+
+ String[] splitNCs = options.ncs.split(",");
+
+ int fieldCount = 4;
ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
typeTraits[0] = new TypeTrait(4);
typeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
typeTraits[2] = new TypeTrait(4);
typeTraits[3] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
-
+
// create factories and providers for B-Tree
TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
- IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
- IBufferCacheProvider bufferCacheProvider = BufferCacheProvider.INSTANCE;
+ IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
IBTreeRegistryProvider btreeRegistryProvider = BTreeRegistryProvider.INSTANCE;
- IFileMappingProviderProvider fileMappingProviderProvider = FileMappingProviderProvider.INSTANCE;
-
- // schema of tuples coming out of primary index
- RecordDescriptor recDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- IntegerSerializerDeserializer.INSTANCE,
- UTF8StringSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE,
- UTF8StringSerializerDeserializer.INSTANCE,
- });
-
+ IStorageManagerInterface storageManager = SimpleStorageManager.INSTANCE;
+
+ // schema of tuples coming out of primary index
+ RecordDescriptor recDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+ IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE, });
+
// comparators for btree
IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
- comparatorFactories[0] = IntegerBinaryComparatorFactory.INSTANCE;
-
+ comparatorFactories[0] = IntegerBinaryComparatorFactory.INSTANCE;
+
// build tuple containing low and high search keys
- ArrayTupleBuilder tb = new ArrayTupleBuilder(comparatorFactories.length*2); // high key and low key
- DataOutput dos = tb.getDataOutput();
-
- tb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(100, dos); // low key
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(200, dos); // build high key
- tb.addFieldEndOffset();
-
- ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE };
- RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
- ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec, keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
- PartitionConstraint keyProviderPartitionConstraint = JobHelper.createPartitionConstraint(splitNCs);
- keyProviderOp.setPartitionConstraint(keyProviderPartitionConstraint);
-
- int[] lowKeyFields = { 0 }; // low key is in field 0 of tuples going into search op
- int[] highKeyFields = { 1 }; // low key is in field 1 of tuples going into search op
-
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(comparatorFactories.length * 2); // high
+ // key
+ // and
+ // low
+ // key
+ DataOutput dos = tb.getDataOutput();
+
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(100, dos); // low key
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(200, dos); // build
+ // high key
+ tb.addFieldEndOffset();
+
+ ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE };
+ RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+
+ ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
+ keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+ PartitionConstraint keyProviderPartitionConstraint = JobHelper.createPartitionConstraint(splitNCs);
+ keyProviderOp.setPartitionConstraint(keyProviderPartitionConstraint);
+
+ int[] lowKeyFields = { 0 }; // low key is in field 0 of tuples going
+ // into search op
+        int[] highKeyFields = { 1 }; // high key is in field 1 of tuples going
+ // into search op
+
IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.btreeName);
- BTreeSearchOperatorDescriptor btreeSearchOp = new BTreeSearchOperatorDescriptor(spec, recDesc, bufferCacheProvider, btreeRegistryProvider, btreeSplitProvider, fileMappingProviderProvider, interiorFrameFactory, leafFrameFactory, typeTraits, comparatorFactories, true, lowKeyFields, highKeyFields, true, true);
+ BTreeSearchOperatorDescriptor btreeSearchOp = new BTreeSearchOperatorDescriptor(spec, recDesc, storageManager,
+ btreeRegistryProvider, btreeSplitProvider, interiorFrameFactory, leafFrameFactory, typeTraits,
+ comparatorFactories, true, lowKeyFields, highKeyFields, true, true);
PartitionConstraint btreeSearchConstraint = JobHelper.createPartitionConstraint(splitNCs);
btreeSearchOp.setPartitionConstraint(btreeSearchConstraint);
-
+
// have each node print the results of its respective B-Tree
PrinterOperatorDescriptor printer = new PrinterOperatorDescriptor(spec);
PartitionConstraint printerConstraint = JobHelper.createPartitionConstraint(splitNCs);
printer.setPartitionConstraint(printerConstraint);
-
+
spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, btreeSearchOp, 0);
-
+
spec.connect(new OneToOneConnectorDescriptor(spec), btreeSearchOp, 0, printer, 0);
-
+
spec.addRoot(printer);
-
- return spec;
- }
+
+ return spec;
+ }
}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
index 7702090..2c4d8fb 100644
--- a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
+++ b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
@@ -36,18 +36,16 @@
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
import edu.uci.ics.hyracks.examples.btree.helper.BTreeRegistryProvider;
-import edu.uci.ics.hyracks.examples.btree.helper.BufferCacheProvider;
-import edu.uci.ics.hyracks.examples.btree.helper.FileMappingProviderProvider;
+import edu.uci.ics.hyracks.examples.btree.helper.SimpleStorageManager;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeBulkLoadOperatorDescriptor;
import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDiskOrderScanOperatorDescriptor;
import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBTreeRegistryProvider;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBufferCacheProvider;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.IFileMappingProviderProvider;
import edu.uci.ics.hyracks.storage.am.btree.frames.NSMInteriorFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.frames.NSMLeafFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
// This example will load a secondary index with <key, primary-index key> pairs
// We require an existing primary index built with PrimaryIndexBulkLoadExample
@@ -62,20 +60,20 @@
@Option(name = "-app", usage = "Hyracks Application name", required = true)
public String app;
-
+
@Option(name = "-target-ncs", usage = "Comma separated list of node-controller names to use", required = true)
public String ncs;
-
+
@Option(name = "-primary-btreename", usage = "Name of primary-index B-Tree to load from", required = true)
public String primaryBTreeName;
-
+
@Option(name = "-secondary-btreename", usage = "B-Tree file name for secondary index to be built", required = true)
public String secondaryBTreeName;
-
+
@Option(name = "-sortbuffer-size", usage = "Sort buffer size in frames (default: 32768)", required = false)
public int sbSize = 32768;
}
-
+
public static void main(String[] args) throws Exception {
Options options = new Options();
CmdLineParser parser = new CmdLineParser(options);
@@ -92,43 +90,46 @@
long end = System.currentTimeMillis();
System.err.println(start + " " + end + " " + (end - start));
}
-
- private static JobSpecification createJob(Options options) {
-
- JobSpecification spec = new JobSpecification();
- String[] splitNCs = options.ncs.split(",");
-
- IBufferCacheProvider bufferCacheProvider = BufferCacheProvider.INSTANCE;
+ private static JobSpecification createJob(Options options) {
+
+ JobSpecification spec = new JobSpecification();
+
+ String[] splitNCs = options.ncs.split(",");
+
IBTreeRegistryProvider btreeRegistryProvider = BTreeRegistryProvider.INSTANCE;
- IFileMappingProviderProvider fileMappingProviderProvider = FileMappingProviderProvider.INSTANCE;
-
- // schema of tuples that we are retrieving from the primary index
- RecordDescriptor recDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- IntegerSerializerDeserializer.INSTANCE, // we will use this as payload in secondary index
- UTF8StringSerializerDeserializer.INSTANCE, // we will use this ask key in secondary index
- IntegerSerializerDeserializer.INSTANCE,
- UTF8StringSerializerDeserializer.INSTANCE
- });
-
+ IStorageManagerInterface storageManager = SimpleStorageManager.INSTANCE;
+
+ // schema of tuples that we are retrieving from the primary index
+ RecordDescriptor recDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+ IntegerSerializerDeserializer.INSTANCE, // we will use this as
+ // payload in secondary
+ // index
+ UTF8StringSerializerDeserializer.INSTANCE, // we will use this
+ // as key in
+ // secondary index
+ IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
+
int primaryFieldCount = 4;
ITypeTrait[] primaryTypeTraits = new ITypeTrait[primaryFieldCount];
primaryTypeTraits[0] = new TypeTrait(4);
primaryTypeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
primaryTypeTraits[2] = new TypeTrait(4);
primaryTypeTraits[3] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
-
+
// create factories and providers for primary B-Tree
TypeAwareTupleWriterFactory primaryTupleWriterFactory = new TypeAwareTupleWriterFactory(primaryTypeTraits);
IBTreeInteriorFrameFactory primaryInteriorFrameFactory = new NSMInteriorFrameFactory(primaryTupleWriterFactory);
IBTreeLeafFrameFactory primaryLeafFrameFactory = new NSMLeafFrameFactory(primaryTupleWriterFactory);
-
- // use a disk-order scan to read primary index
+
+ // use a disk-order scan to read primary index
IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
- BTreeDiskOrderScanOperatorDescriptor btreeScanOp = new BTreeDiskOrderScanOperatorDescriptor(spec, recDesc, bufferCacheProvider, btreeRegistryProvider, primarySplitProvider, fileMappingProviderProvider, primaryInteriorFrameFactory, primaryLeafFrameFactory, primaryTypeTraits);
- PartitionConstraint scanPartitionConstraint = JobHelper.createPartitionConstraint(splitNCs);
- btreeScanOp.setPartitionConstraint(scanPartitionConstraint);
-
+ BTreeDiskOrderScanOperatorDescriptor btreeScanOp = new BTreeDiskOrderScanOperatorDescriptor(spec, recDesc,
+ storageManager, btreeRegistryProvider, primarySplitProvider, primaryInteriorFrameFactory,
+ primaryLeafFrameFactory, primaryTypeTraits);
+ PartitionConstraint scanPartitionConstraint = JobHelper.createPartitionConstraint(splitNCs);
+ btreeScanOp.setPartitionConstraint(scanPartitionConstraint);
+
// sort the tuples as preparation for bulk load into secondary index
// fields to sort on
int[] sortFields = { 1, 0 };
@@ -136,39 +137,41 @@
IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[2];
comparatorFactories[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
comparatorFactories[1] = IntegerBinaryComparatorFactory.INSTANCE;
- ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, options.sbSize, sortFields, comparatorFactories, recDesc);
+ ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, options.sbSize, sortFields,
+ comparatorFactories, recDesc);
PartitionConstraint sorterConstraint = JobHelper.createPartitionConstraint(splitNCs);
sorter.setPartitionConstraint(sorterConstraint);
-
+
// tuples to be put into B-Tree shall have 2 fields
- int secondaryFieldCount = 2;
+ int secondaryFieldCount = 2;
ITypeTrait[] secondaryTypeTraits = new ITypeTrait[secondaryFieldCount];
secondaryTypeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
- secondaryTypeTraits[1] = new TypeTrait(4);
-
+ secondaryTypeTraits[1] = new TypeTrait(4);
+
// create factories and providers for secondary B-Tree
TypeAwareTupleWriterFactory secondaryTupleWriterFactory = new TypeAwareTupleWriterFactory(secondaryTypeTraits);
- IBTreeInteriorFrameFactory secondaryInteriorFrameFactory = new NSMInteriorFrameFactory(secondaryTupleWriterFactory);
+ IBTreeInteriorFrameFactory secondaryInteriorFrameFactory = new NSMInteriorFrameFactory(
+ secondaryTupleWriterFactory);
IBTreeLeafFrameFactory secondaryLeafFrameFactory = new NSMLeafFrameFactory(secondaryTupleWriterFactory);
-
- // the B-Tree expects its keyfields to be at the front of its input tuple
+
+ // the B-Tree expects its keyfields to be at the front of its input
+ // tuple
int[] fieldPermutation = { 1, 0 };
IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.secondaryBTreeName);
- BTreeBulkLoadOperatorDescriptor btreeBulkLoad = new BTreeBulkLoadOperatorDescriptor(spec,
- bufferCacheProvider, btreeRegistryProvider, btreeSplitProvider, fileMappingProviderProvider, secondaryInteriorFrameFactory,
- secondaryLeafFrameFactory, secondaryTypeTraits, comparatorFactories, fieldPermutation, 0.7f);
+ BTreeBulkLoadOperatorDescriptor btreeBulkLoad = new BTreeBulkLoadOperatorDescriptor(spec, storageManager,
+ btreeRegistryProvider, btreeSplitProvider, secondaryInteriorFrameFactory, secondaryLeafFrameFactory,
+ secondaryTypeTraits, comparatorFactories, fieldPermutation, 0.7f);
PartitionConstraint bulkLoadConstraint = JobHelper.createPartitionConstraint(splitNCs);
- btreeBulkLoad.setPartitionConstraint(bulkLoadConstraint);
-
+ btreeBulkLoad.setPartitionConstraint(bulkLoadConstraint);
+
// connect the ops
-
+
spec.connect(new OneToOneConnectorDescriptor(spec), btreeScanOp, 0, sorter, 0);
-
- //spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, btreeBulkLoad, 0);
+
spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, btreeBulkLoad, 0);
-
+
spec.addRoot(btreeBulkLoad);
-
- return spec;
- }
+
+ return spec;
+ }
}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexSearchExample.java b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
index 490169c..6fd39f0 100644
--- a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
+++ b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
@@ -38,18 +38,16 @@
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
import edu.uci.ics.hyracks.dataflow.std.misc.PrinterOperatorDescriptor;
import edu.uci.ics.hyracks.examples.btree.helper.BTreeRegistryProvider;
-import edu.uci.ics.hyracks.examples.btree.helper.BufferCacheProvider;
-import edu.uci.ics.hyracks.examples.btree.helper.FileMappingProviderProvider;
+import edu.uci.ics.hyracks.examples.btree.helper.SimpleStorageManager;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
import edu.uci.ics.hyracks.storage.am.btree.dataflow.ConstantTupleSourceOperatorDescriptor;
import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBTreeRegistryProvider;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBufferCacheProvider;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.IFileMappingProviderProvider;
import edu.uci.ics.hyracks.storage.am.btree.frames.NSMInteriorFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.frames.NSMLeafFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
// This example will perform range search on the secondary index
// and then retrieve the corresponding source records from the primary index
@@ -64,15 +62,15 @@
@Option(name = "-app", usage = "Hyracks Application name", required = true)
public String app;
-
+
@Option(name = "-target-ncs", usage = "Comma separated list of node-controller names to use", required = true)
public String ncs;
-
+
@Option(name = "-primary-btreename", usage = "Primary B-Tree file name", required = true)
- public String primaryBTreeName;
-
+ public String primaryBTreeName;
+
@Option(name = "-secondary-btreename", usage = "Secondary B-Tree file name to search", required = true)
- public String secondaryBTreeName;
+ public String secondaryBTreeName;
}
public static void main(String[] args) throws Exception {
@@ -91,106 +89,125 @@
long end = System.currentTimeMillis();
System.err.println(start + " " + end + " " + (end - start));
}
-
- private static JobSpecification createJob(Options options) throws HyracksDataException {
-
- JobSpecification spec = new JobSpecification();
- String[] splitNCs = options.ncs.split(",");
-
- IBufferCacheProvider bufferCacheProvider = BufferCacheProvider.INSTANCE;
+ private static JobSpecification createJob(Options options) throws HyracksDataException {
+
+ JobSpecification spec = new JobSpecification();
+
+ String[] splitNCs = options.ncs.split(",");
+
IBTreeRegistryProvider btreeRegistryProvider = BTreeRegistryProvider.INSTANCE;
- IFileMappingProviderProvider fileMappingProviderProvider = FileMappingProviderProvider.INSTANCE;
-
- // schema of tuples coming out of secondary index
- RecordDescriptor secondaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- UTF8StringSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE
- });
-
- int secondaryFieldCount = 2;
+ IStorageManagerInterface storageManager = SimpleStorageManager.INSTANCE;
+
+ // schema of tuples coming out of secondary index
+ RecordDescriptor secondaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+ UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
+
+ int secondaryFieldCount = 2;
ITypeTrait[] secondaryTypeTraits = new ITypeTrait[secondaryFieldCount];
secondaryTypeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
- secondaryTypeTraits[1] = new TypeTrait(4);
-
+ secondaryTypeTraits[1] = new TypeTrait(4);
+
// create factories and providers for secondary B-Tree
TypeAwareTupleWriterFactory secondaryTupleWriterFactory = new TypeAwareTupleWriterFactory(secondaryTypeTraits);
- IBTreeInteriorFrameFactory secondaryInteriorFrameFactory = new NSMInteriorFrameFactory(secondaryTupleWriterFactory);
+ IBTreeInteriorFrameFactory secondaryInteriorFrameFactory = new NSMInteriorFrameFactory(
+ secondaryTupleWriterFactory);
IBTreeLeafFrameFactory secondaryLeafFrameFactory = new NSMLeafFrameFactory(secondaryTupleWriterFactory);
-
+
// schema of tuples coming out of primary index
- RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
- IntegerSerializerDeserializer.INSTANCE,
- UTF8StringSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE,
- UTF8StringSerializerDeserializer.INSTANCE,
- });
-
+ RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+ IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE, });
+
int primaryFieldCount = 4;
ITypeTrait[] primaryTypeTraits = new ITypeTrait[primaryFieldCount];
primaryTypeTraits[0] = new TypeTrait(4);
primaryTypeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
primaryTypeTraits[2] = new TypeTrait(4);
primaryTypeTraits[3] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
-
+
// create factories and providers for secondary B-Tree
TypeAwareTupleWriterFactory primaryTupleWriterFactory = new TypeAwareTupleWriterFactory(primaryTypeTraits);
IBTreeInteriorFrameFactory primaryInteriorFrameFactory = new NSMInteriorFrameFactory(primaryTupleWriterFactory);
IBTreeLeafFrameFactory primaryLeafFrameFactory = new NSMLeafFrameFactory(primaryTupleWriterFactory);
-
- // comparators for btree, note that we only need a comparator for the non-unique key
- // i.e. we will have a range condition on the first field only (implying [-infinity, +infinity] for the second field)
+
+ // comparators for btree, note that we only need a comparator for the
+ // non-unique key
+ // i.e. we will have a range condition on the first field only (implying
+ // [-infinity, +infinity] for the second field)
IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
- comparatorFactories[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
-
+ comparatorFactories[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
+
// build tuple containing low and high search keys
- ArrayTupleBuilder tb = new ArrayTupleBuilder(comparatorFactories.length*2); // low and high key
- DataOutput dos = tb.getDataOutput();
-
- tb.reset();
- UTF8StringSerializerDeserializer.INSTANCE.serialize("0", dos); // low key
- tb.addFieldEndOffset();
- UTF8StringSerializerDeserializer.INSTANCE.serialize("f", dos); // high key
- tb.addFieldEndOffset();
-
- ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE };
- RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
- ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec, keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
- PartitionConstraint keyProviderPartitionConstraint = JobHelper.createPartitionConstraint(splitNCs);
- keyProviderOp.setPartitionConstraint(keyProviderPartitionConstraint);
-
- int[] secondaryLowKeyFields = { 0 }; // low key is in field 0 of tuples going into secondary index search op
- int[] secondaryHighKeyFields = { 1 }; // high key is in field 1 of tuples going into secondary index search op
-
- IFileSplitProvider secondarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.secondaryBTreeName);
- BTreeSearchOperatorDescriptor secondarySearchOp = new BTreeSearchOperatorDescriptor(spec, secondaryRecDesc, bufferCacheProvider, btreeRegistryProvider, secondarySplitProvider, fileMappingProviderProvider, secondaryInteriorFrameFactory, secondaryLeafFrameFactory, secondaryTypeTraits, comparatorFactories, true, secondaryLowKeyFields, secondaryHighKeyFields, true, true);
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(comparatorFactories.length * 2); // low
+ // and
+ // high
+ // key
+ DataOutput dos = tb.getDataOutput();
+
+ tb.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize("0", dos); // low
+ // key
+ tb.addFieldEndOffset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize("f", dos); // high
+ // key
+ tb.addFieldEndOffset();
+
+ ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE };
+ RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+
+ ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
+ keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+ PartitionConstraint keyProviderPartitionConstraint = JobHelper.createPartitionConstraint(splitNCs);
+ keyProviderOp.setPartitionConstraint(keyProviderPartitionConstraint);
+
+ int[] secondaryLowKeyFields = { 0 }; // low key is in field 0 of tuples
+ // going into secondary index
+ // search op
+ int[] secondaryHighKeyFields = { 1 }; // high key is in field 1 of
+ // tuples going into secondary
+ // index search op
+
+ IFileSplitProvider secondarySplitProvider = JobHelper.createFileSplitProvider(splitNCs,
+ options.secondaryBTreeName);
+ BTreeSearchOperatorDescriptor secondarySearchOp = new BTreeSearchOperatorDescriptor(spec, secondaryRecDesc,
+ storageManager, btreeRegistryProvider, secondarySplitProvider, secondaryInteriorFrameFactory,
+ secondaryLeafFrameFactory, secondaryTypeTraits, comparatorFactories, true, secondaryLowKeyFields,
+ secondaryHighKeyFields, true, true);
PartitionConstraint secondarySearchConstraint = JobHelper.createPartitionConstraint(splitNCs);
secondarySearchOp.setPartitionConstraint(secondarySearchConstraint);
-
+
// secondary index will output tuples with [UTF8String, Integer]
- // the Integer field refers to the key in the primary index of the source data records
- int[] primaryLowKeyFields = { 1 }; // low key is in field 0 of tuples going into primary index search op
- int[] primaryHighKeyFields = { 1 }; // high key is in field 1 of tuples going into primary index search op
-
+ // the Integer field refers to the key in the primary index of the
+ // source data records
+ int[] primaryLowKeyFields = { 1 }; // low key is in field 1 of tuples
+ // going into primary index search op
+ int[] primaryHighKeyFields = { 1 }; // high key is in field 1 of tuples
+ // going into primary index search
+ // op
+
IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
- BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc, bufferCacheProvider, btreeRegistryProvider, primarySplitProvider, fileMappingProviderProvider, primaryInteriorFrameFactory, primaryLeafFrameFactory, primaryTypeTraits, comparatorFactories, true, primaryLowKeyFields, primaryHighKeyFields, true, true);
+ BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
+ storageManager, btreeRegistryProvider, primarySplitProvider, primaryInteriorFrameFactory,
+ primaryLeafFrameFactory, primaryTypeTraits, comparatorFactories, true, primaryLowKeyFields,
+ primaryHighKeyFields, true, true);
PartitionConstraint primarySearchConstraint = JobHelper.createPartitionConstraint(splitNCs);
primarySearchOp.setPartitionConstraint(primarySearchConstraint);
-
+
// have each node print the results of its respective B-Tree
PrinterOperatorDescriptor printer = new PrinterOperatorDescriptor(spec);
PartitionConstraint printerConstraint = JobHelper.createPartitionConstraint(splitNCs);
printer.setPartitionConstraint(printerConstraint);
-
+
spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondarySearchOp, 0);
-
+
spec.connect(new OneToOneConnectorDescriptor(spec), secondarySearchOp, 0, primarySearchOp, 0);
-
+
spec.connect(new OneToOneConnectorDescriptor(spec), primarySearchOp, 0, printer, 0);
-
+
spec.addRoot(printer);
-
- return spec;
- }
+
+ return spec;
+ }
}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/BTreeRegistryProvider.java b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/BTreeRegistryProvider.java
index 4c3c7524..46ef79f 100644
--- a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/BTreeRegistryProvider.java
+++ b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/BTreeRegistryProvider.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.examples.btree.helper;
import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeRegistry;
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/BufferCacheProvider.java b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/BufferCacheProvider.java
deleted file mode 100644
index db1368d..0000000
--- a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/BufferCacheProvider.java
+++ /dev/null
@@ -1,24 +0,0 @@
-package edu.uci.ics.hyracks.examples.btree.helper;
-
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBufferCacheProvider;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.FileManager;
-
-public class BufferCacheProvider implements IBufferCacheProvider {
- private static final long serialVersionUID = 1L;
-
- public static final BufferCacheProvider INSTANCE = new BufferCacheProvider();
-
- private BufferCacheProvider() {
- }
-
- @Override
- public IBufferCache getBufferCache() {
- return RuntimeContext.getInstance().getBufferCache();
- }
-
- @Override
- public FileManager getFileManager() {
- return RuntimeContext.getInstance().getFileManager();
- }
-}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/DataGenOperatorDescriptor.java b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/DataGenOperatorDescriptor.java
index 0ee9210..a325739 100644
--- a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/DataGenOperatorDescriptor.java
+++ b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/DataGenOperatorDescriptor.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.examples.btree.helper;
import java.io.DataOutput;
@@ -20,129 +35,130 @@
import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
-public class DataGenOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor{
-
- private static final long serialVersionUID = 1L;
- private final int numRecords;
-
- private final int intMinVal;
- private final int intMaxVal;
- private final int maxStrLen;
- private final int uniqueField;
- private final long randomSeed;
-
- public DataGenOperatorDescriptor(JobSpecification spec, RecordDescriptor outputRecord, int numRecords, int uniqueField, int intMinVal, int intMaxVal, int maxStrLen, long randomSeed) {
- super(spec, 0, 1);
- this.numRecords = numRecords;
- this.uniqueField = uniqueField;
- this.intMinVal = intMinVal;
- this.intMaxVal = intMaxVal;
- this.maxStrLen = maxStrLen;
- this.randomSeed = randomSeed;
- recordDescriptors[0] = outputRecord;
- }
-
- @Override
- public IOperatorNodePushable createPushRuntime(IHyracksContext ctx,
- IOperatorEnvironment env,
- IRecordDescriptorProvider recordDescProvider, int partition,
- int nPartitions) {
-
- final ByteBuffer outputFrame = ctx.getResourceManager().allocateFrame();
- final FrameTupleAppender appender = new FrameTupleAppender(ctx);
- final RecordDescriptor recDesc = recordDescriptors[0];
- final ArrayTupleBuilder tb = new ArrayTupleBuilder(recDesc.getFields().length);
- final Random rnd = new Random(randomSeed);
- final int maxUniqueAttempts = 20;
-
- return new AbstractUnaryOutputSourceOperatorNodePushable() {
-
- // for quick & dirty exclusion of duplicates
- // WARNING: could contain numRecord entries and use a lot of memory
- HashSet<String> stringHs = new HashSet<String>();
- HashSet<Integer> intHs = new HashSet<Integer>();
-
- @Override
- public void initialize() throws HyracksDataException {
- writer.open();
- try {
- appender.reset(outputFrame, true);
- for(int i = 0; i < numRecords; i++) {
- tb.reset();
- for(int j = 0; j < recDesc.getFields().length; j++) {
- genField(tb, j);
- }
+public class DataGenOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
- if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
- FrameUtils.flushFrame(outputFrame, writer);
- appender.reset(outputFrame, true);
- if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
- throw new IllegalStateException();
- }
- }
- }
- FrameUtils.flushFrame(outputFrame, writer);
- }
- finally {
- writer.close();
- }
+ private static final long serialVersionUID = 1L;
+ private final int numRecords;
+
+ private final int intMinVal;
+ private final int intMaxVal;
+ private final int maxStrLen;
+ private final int uniqueField;
+ private final long randomSeed;
+
+ public DataGenOperatorDescriptor(JobSpecification spec, RecordDescriptor outputRecord, int numRecords,
+ int uniqueField, int intMinVal, int intMaxVal, int maxStrLen, long randomSeed) {
+ super(spec, 0, 1);
+ this.numRecords = numRecords;
+ this.uniqueField = uniqueField;
+ this.intMinVal = intMinVal;
+ this.intMaxVal = intMaxVal;
+ this.maxStrLen = maxStrLen;
+ this.randomSeed = randomSeed;
+ recordDescriptors[0] = outputRecord;
+ }
+
+ @Override
+ public IOperatorNodePushable createPushRuntime(IHyracksContext ctx, IOperatorEnvironment env,
+ IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
+
+ final ByteBuffer outputFrame = ctx.getResourceManager().allocateFrame();
+ final FrameTupleAppender appender = new FrameTupleAppender(ctx);
+ final RecordDescriptor recDesc = recordDescriptors[0];
+ final ArrayTupleBuilder tb = new ArrayTupleBuilder(recDesc.getFields().length);
+ final Random rnd = new Random(randomSeed);
+ final int maxUniqueAttempts = 20;
+
+ return new AbstractUnaryOutputSourceOperatorNodePushable() {
+
+ // for quick & dirty exclusion of duplicates
+ // WARNING: could contain numRecord entries and use a lot of memory
+ HashSet<String> stringHs = new HashSet<String>();
+ HashSet<Integer> intHs = new HashSet<Integer>();
+
+ @Override
+ public void initialize() throws HyracksDataException {
+ writer.open();
+ try {
+ appender.reset(outputFrame, true);
+ for (int i = 0; i < numRecords; i++) {
+ tb.reset();
+ for (int j = 0; j < recDesc.getFields().length; j++) {
+ genField(tb, j);
+ }
+
+ if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+ FrameUtils.flushFrame(outputFrame, writer);
+ appender.reset(outputFrame, true);
+ if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+ throw new IllegalStateException();
+ }
+ }
+ }
+ FrameUtils.flushFrame(outputFrame, writer);
+ } finally {
+ writer.close();
+ }
}
-
- private void genField(ArrayTupleBuilder tb, int fieldIndex) throws HyracksDataException {
- DataOutput dos = tb.getDataOutput();
- if(recDesc.getFields()[fieldIndex] instanceof IntegerSerializerDeserializer) {
- int val = -1;
- if(fieldIndex == uniqueField) {
- int attempt = 0;
- while(attempt < maxUniqueAttempts) {
- int tmp = Math.abs(rnd.nextInt()) % (intMaxVal - intMinVal) + intMinVal;
- if(intHs.contains(tmp)) attempt++;
- else {
- val = tmp;
- intHs.add(val);
- break;
- }
- }
- if(attempt == maxUniqueAttempts) throw new HyracksDataException("MaxUnique attempts reached in datagen");
- }
- else {
- val = Math.abs(rnd.nextInt()) % (intMaxVal - intMinVal) + intMinVal;
- }
- recDesc.getFields()[fieldIndex].serialize(val, dos);
- tb.addFieldEndOffset();
- } else if (recDesc.getFields()[fieldIndex] instanceof UTF8StringSerializerDeserializer) {
- String val = null;
- if(fieldIndex == uniqueField) {
- int attempt = 0;
- while(attempt < maxUniqueAttempts) {
- String tmp = randomString(maxStrLen, rnd);
- if(stringHs.contains(tmp)) attempt++;
- else {
- val = tmp;
- stringHs.add(val);
- break;
- }
- }
- if(attempt == maxUniqueAttempts) throw new HyracksDataException("MaxUnique attempts reached in datagen");
- }
- else {
- val = randomString(maxStrLen, rnd);
- }
- recDesc.getFields()[fieldIndex].serialize(val, dos);
- tb.addFieldEndOffset();
- } else {
- throw new HyracksDataException("Type unsupported in data generator. Only integers and strings allowed");
- }
- }
-
- private String randomString(int length, Random random) {
- String s = Long.toHexString(Double.doubleToLongBits(random.nextDouble()));
- StringBuilder strBuilder = new StringBuilder();
- for (int i = 0; i < s.length() && i < length; i++) {
- strBuilder.append(s.charAt(Math.abs(random.nextInt()) % s.length()));
- }
- return strBuilder.toString();
- }
- };
- }
+
+ private void genField(ArrayTupleBuilder tb, int fieldIndex) throws HyracksDataException {
+ DataOutput dos = tb.getDataOutput();
+ if (recDesc.getFields()[fieldIndex] instanceof IntegerSerializerDeserializer) {
+ int val = -1;
+ if (fieldIndex == uniqueField) {
+ int attempt = 0;
+ while (attempt < maxUniqueAttempts) {
+ int tmp = Math.abs(rnd.nextInt()) % (intMaxVal - intMinVal) + intMinVal;
+ if (intHs.contains(tmp))
+ attempt++;
+ else {
+ val = tmp;
+ intHs.add(val);
+ break;
+ }
+ }
+ if (attempt == maxUniqueAttempts)
+ throw new HyracksDataException("MaxUnique attempts reached in datagen");
+ } else {
+ val = Math.abs(rnd.nextInt()) % (intMaxVal - intMinVal) + intMinVal;
+ }
+ recDesc.getFields()[fieldIndex].serialize(val, dos);
+ tb.addFieldEndOffset();
+ } else if (recDesc.getFields()[fieldIndex] instanceof UTF8StringSerializerDeserializer) {
+ String val = null;
+ if (fieldIndex == uniqueField) {
+ int attempt = 0;
+ while (attempt < maxUniqueAttempts) {
+ String tmp = randomString(maxStrLen, rnd);
+ if (stringHs.contains(tmp))
+ attempt++;
+ else {
+ val = tmp;
+ stringHs.add(val);
+ break;
+ }
+ }
+ if (attempt == maxUniqueAttempts)
+ throw new HyracksDataException("MaxUnique attempts reached in datagen");
+ } else {
+ val = randomString(maxStrLen, rnd);
+ }
+ recDesc.getFields()[fieldIndex].serialize(val, dos);
+ tb.addFieldEndOffset();
+ } else {
+ throw new HyracksDataException(
+ "Type unsupported in data generator. Only integers and strings allowed");
+ }
+ }
+
+ private String randomString(int length, Random random) {
+ String s = Long.toHexString(Double.doubleToLongBits(random.nextDouble()));
+ StringBuilder strBuilder = new StringBuilder();
+ for (int i = 0; i < s.length() && i < length; i++) {
+ strBuilder.append(s.charAt(Math.abs(random.nextInt()) % s.length()));
+ }
+ return strBuilder.toString();
+ }
+ };
+ }
}
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/FileMappingProvider.java b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/FileMappingProvider.java
deleted file mode 100644
index 5a03045..0000000
--- a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/FileMappingProvider.java
+++ /dev/null
@@ -1,41 +0,0 @@
-package edu.uci.ics.hyracks.examples.btree.helper;
-
-import java.util.Hashtable;
-import java.util.Map;
-
-import edu.uci.ics.hyracks.storage.common.file.IFileMappingProvider;
-
-public class FileMappingProvider implements IFileMappingProvider {
-
- private static final long serialVersionUID = 1L;
- private int nextFileId = 0;
- private Map<String, Integer> map = new Hashtable<String, Integer>();
-
- @Override
- public Integer mapNameToFileId(String name, boolean create) {
- Integer val = map.get(name);
- if(create) {
- if(val == null) {
- int ret = nextFileId;
- map.put(name, nextFileId++);
- return ret;
- }
- else {
- return null; // create requested but value already exists
- }
- }
- else {
- return val; // just return value
- }
- }
-
- @Override
- public void unmapName(String name) {
- map.remove(name);
- }
-
- @Override
- public Integer getFileId(String name) {
- return map.get(name);
- }
-}
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/FileMappingProviderProvider.java b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/FileMappingProviderProvider.java
deleted file mode 100644
index 4f2c8f4..0000000
--- a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/FileMappingProviderProvider.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package edu.uci.ics.hyracks.examples.btree.helper;
-
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.IFileMappingProviderProvider;
-import edu.uci.ics.hyracks.storage.common.file.IFileMappingProvider;
-
-public class FileMappingProviderProvider implements IFileMappingProviderProvider {
- private static final long serialVersionUID = 1L;
-
- public static final IFileMappingProviderProvider INSTANCE = new FileMappingProviderProvider();
-
- @Override
- public IFileMappingProvider getFileMappingProvider() {
- return RuntimeContext.getInstance().getFileMappingProvider();
- }
-}
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/NCBootstrap.java b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/NCBootstrap.java
index e87d8bd..d56aef7 100644
--- a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/NCBootstrap.java
+++ b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/NCBootstrap.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.examples.btree.helper;
import java.util.logging.Logger;
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/RuntimeContext.java b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/RuntimeContext.java
index d542ed9..9cd55ed 100644
--- a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/RuntimeContext.java
+++ b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/RuntimeContext.java
@@ -1,25 +1,40 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.examples.btree.helper;
import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeRegistry;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.FileMappingProvider;
import edu.uci.ics.hyracks.storage.common.buffercache.BufferCache;
import edu.uci.ics.hyracks.storage.common.buffercache.ClockPageReplacementStrategy;
import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.FileManager;
-import edu.uci.ics.hyracks.storage.common.file.IFileMappingProvider;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IPageReplacementStrategy;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapManager;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
public class RuntimeContext {
private static RuntimeContext INSTANCE;
- private FileManager fileManager;
- private IBufferCache bufferCache;
private BTreeRegistry btreeRegistry;
- private IFileMappingProvider fileMappingProvider;
-
+ private IBufferCache bufferCache;
+ private IFileMapManager fileMapManager;
+
private RuntimeContext() {
}
-
+
public static void initialize() {
if (INSTANCE != null) {
throw new IllegalStateException("Instance already initialized");
@@ -37,15 +52,14 @@
private void stop() {
bufferCache.close();
- fileManager.close();
}
private void start() {
- fileManager = new FileManager();
- bufferCache = new BufferCache(new HeapBufferAllocator(), new ClockPageReplacementStrategy(), fileManager,
- 32768, 1024);
+ fileMapManager = new SimpleFileMapManager();
+ ICacheMemoryAllocator allocator = new HeapBufferAllocator();
+ IPageReplacementStrategy prs = new ClockPageReplacementStrategy();
+ bufferCache = new BufferCache(allocator, prs, fileMapManager, 32768, 50);
btreeRegistry = new BTreeRegistry();
- fileMappingProvider = new FileMappingProvider();
}
public static RuntimeContext getInstance() {
@@ -56,15 +70,11 @@
return bufferCache;
}
- public FileManager getFileManager() {
- return fileManager;
+ public IFileMapProvider getFileMapManager() {
+ return fileMapManager;
}
public BTreeRegistry getBTreeRegistry() {
return btreeRegistry;
}
-
- public IFileMappingProvider getFileMappingProvider() {
- return fileMappingProvider;
- }
}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/SimpleFileMapManager.java b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/SimpleFileMapManager.java
new file mode 100644
index 0000000..21f864e
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/SimpleFileMapManager.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.examples.btree.helper;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapManager;
+
+public class SimpleFileMapManager implements IFileMapManager {
+
+ private static final long serialVersionUID = 1L;
+ private Map<Integer, String> id2nameMap = new HashMap<Integer, String>();
+ private Map<String, Integer> name2IdMap = new HashMap<String, Integer>();
+ private int idCounter = 0;
+
+ @Override
+ public String lookupFileName(int fileId) throws HyracksDataException {
+ String fName = id2nameMap.get(fileId);
+ if (fName == null) {
+ throw new HyracksDataException("No mapping found for id: " + fileId);
+ }
+ return fName;
+ }
+
+ @Override
+ public int lookupFileId(String fileName) throws HyracksDataException {
+ Integer fileId = name2IdMap.get(fileName);
+ if (fileId == null) {
+ throw new HyracksDataException("No mapping found for name: " + fileName);
+ }
+ return fileId;
+ }
+
+ @Override
+ public boolean isMapped(String fileName) {
+ return name2IdMap.containsKey(fileName);
+ }
+
+ @Override
+ public boolean isMapped(int fileId) {
+ return id2nameMap.containsKey(fileId);
+ }
+
+ @Override
+ public void unregisterFile(int fileId) throws HyracksDataException {
+ String fileName = id2nameMap.remove(fileId);
+ name2IdMap.remove(fileName);
+ }
+
+ @Override
+ public void registerFile(String fileName) throws HyracksDataException {
+ Integer fileId = idCounter++;
+ id2nameMap.put(fileId, fileName);
+ name2IdMap.put(fileName, fileId);
+ }
+}
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/SimpleStorageManager.java b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/SimpleStorageManager.java
new file mode 100644
index 0000000..8348bb9
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/SimpleStorageManager.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.examples.btree.helper;
+
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+
+public class SimpleStorageManager implements IStorageManagerInterface {
+
+ private static final long serialVersionUID = 1L;
+
+ public static SimpleStorageManager INSTANCE = new SimpleStorageManager();
+
+ @Override
+ public IBufferCache getBufferCache() {
+ return RuntimeContext.getInstance().getBufferCache();
+ }
+
+ @Override
+ public IFileMapProvider getFileMapProvider() {
+ return RuntimeContext.getInstance().getFileMapManager();
+ }
+
+}
diff --git a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeOperatorsTest.java b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeOperatorsTest.java
index 5008c26..54b97c1 100644
--- a/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeOperatorsTest.java
+++ b/hyracks/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeOperatorsTest.java
@@ -17,7 +17,6 @@
import java.io.DataOutput;
import java.io.File;
-import java.io.RandomAccessFile;
import org.junit.Test;
@@ -55,12 +54,9 @@
import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeRegistry;
import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeRegistryProvider;
import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BufferCacheProvider;
import edu.uci.ics.hyracks.storage.am.btree.dataflow.ConstantTupleSourceOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.FileMappingProviderProvider;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.HyracksSimpleStorageManagerInterface;
import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBTreeRegistryProvider;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBufferCacheProvider;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.IFileMappingProviderProvider;
import edu.uci.ics.hyracks.storage.am.btree.frames.MetaDataFrame;
import edu.uci.ics.hyracks.storage.am.btree.frames.NSMInteriorFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.frames.NSMLeafFrameFactory;
@@ -72,21 +68,20 @@
import edu.uci.ics.hyracks.storage.am.btree.impls.RangeSearchCursor;
import edu.uci.ics.hyracks.storage.am.btree.tuples.TypeAwareTupleWriterFactory;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.FileInfo;
-import edu.uci.ics.hyracks.storage.common.file.FileManager;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
public class BTreeOperatorsTest extends AbstractIntegrationTest {
-
- @Test
- public void bulkLoadTest() throws Exception {
- // relies on the fact that NCs are run from same process
- //System.setProperty("NodeControllerDataPath", System.getProperty("java.io.tmpdir") + "/");
-
- JobSpecification spec = new JobSpecification();
-
- FileSplit[] ordersSplits = new FileSplit[] {
- new FileSplit(NC1_ID, new File("data/tpch0.001/orders-part1.tbl")) };
+
+ private static HyracksSimpleStorageManagerInterface storageManager = new HyracksSimpleStorageManagerInterface(8192,
+ 20);
+
+ @Test
+ public void bulkLoadTest() throws Exception {
+
+ JobSpecification spec = new JobSpecification();
+
+ FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new File("data/tpch0.001/orders-part1.tbl")) };
IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
@@ -94,175 +89,193 @@
UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
UTF8StringSerializerDeserializer.INSTANCE });
-
+
FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
- PartitionConstraint ordersPartitionConstraint = new ExplicitPartitionConstraint(new LocationConstraint[] {
- new AbsoluteLocationConstraint(NC1_ID) });
+ PartitionConstraint ordersPartitionConstraint = new ExplicitPartitionConstraint(
+ new LocationConstraint[] { new AbsoluteLocationConstraint(NC1_ID) });
ordScanner.setPartitionConstraint(ordersPartitionConstraint);
InMemorySortOperatorDescriptor sorter = new InMemorySortOperatorDescriptor(spec, new int[] { 0 },
new IBinaryComparatorFactory[] { UTF8StringBinaryComparatorFactory.INSTANCE }, ordersDesc);
- PartitionConstraint sortersPartitionConstraint = new ExplicitPartitionConstraint(new LocationConstraint[] {
- new AbsoluteLocationConstraint(NC1_ID) });
+ PartitionConstraint sortersPartitionConstraint = new ExplicitPartitionConstraint(
+ new LocationConstraint[] { new AbsoluteLocationConstraint(NC1_ID) });
sorter.setPartitionConstraint(sortersPartitionConstraint);
-
+
// declare fields
- int fieldCount = 3;
- ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ int fieldCount = 3;
+ ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
typeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
typeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
typeTraits[2] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
-
+
// declare keys
- int keyFieldCount = 1;
- IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[keyFieldCount];
- comparatorFactories[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
-
- TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
- //SimpleTupleWriterFactory tupleWriterFactory = new SimpleTupleWriterFactory();
- IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
- IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
- IBufferCacheProvider bufferCacheProvider = new BufferCacheProvider();
- IBTreeRegistryProvider btreeRegistryProvider = new BTreeRegistryProvider();
- IFileMappingProviderProvider fileMappingProviderProvider = new FileMappingProviderProvider();
-
- int[] fieldPermutation = { 0, 4, 5};
- String btreeName = "btree.bin";
- String nc1FileName = System.getProperty("java.io.tmpdir") + "/nc1/" + btreeName;
- IFileSplitProvider btreeSplitProvider = new ConstantFileSplitProvider(
- new FileSplit[] { new FileSplit(NC1_ID, new File(nc1FileName)) } );
-
- BTreeBulkLoadOperatorDescriptor btreeBulkLoad = new BTreeBulkLoadOperatorDescriptor(spec, bufferCacheProvider, btreeRegistryProvider, btreeSplitProvider, fileMappingProviderProvider, interiorFrameFactory, leafFrameFactory, typeTraits, comparatorFactories, fieldPermutation, 0.7f);
- PartitionConstraint btreePartitionConstraintA = new ExplicitPartitionConstraint(new LocationConstraint[] { new AbsoluteLocationConstraint(NC1_ID) });
- btreeBulkLoad.setPartitionConstraint(btreePartitionConstraintA);
-
+ int keyFieldCount = 1;
+ IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[keyFieldCount];
+ comparatorFactories[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
+
+ TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+ // SimpleTupleWriterFactory tupleWriterFactory = new
+ // SimpleTupleWriterFactory();
+ IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
+ IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
+ IBTreeRegistryProvider btreeRegistryProvider = new BTreeRegistryProvider();
+
+ int[] fieldPermutation = { 0, 4, 5 };
+ String btreeName = "btree.bin";
+ String nc1FileName = System.getProperty("java.io.tmpdir") + "/nc1/" + btreeName;
+ IFileSplitProvider btreeSplitProvider = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+ new File(nc1FileName)) });
+
+ BTreeBulkLoadOperatorDescriptor btreeBulkLoad = new BTreeBulkLoadOperatorDescriptor(spec, storageManager,
+ btreeRegistryProvider, btreeSplitProvider, interiorFrameFactory, leafFrameFactory, typeTraits,
+ comparatorFactories, fieldPermutation, 0.7f);
+ PartitionConstraint btreePartitionConstraintA = new ExplicitPartitionConstraint(
+ new LocationConstraint[] { new AbsoluteLocationConstraint(NC1_ID) });
+ btreeBulkLoad.setPartitionConstraint(btreePartitionConstraintA);
+
spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
-
+
spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, btreeBulkLoad, 0);
-
+
spec.addRoot(btreeBulkLoad);
runTest(spec);
-
- // construct a multicomparator from the factories (only for printing purposes)
- IBinaryComparator[] comparators = new IBinaryComparator[comparatorFactories.length];
- for(int i = 0; i < comparatorFactories.length; i++) {
- comparators[i] = comparatorFactories[i].createBinaryComparator();
- }
-
+
+ // construct a multicomparator from the factories (only for printing
+ // purposes)
+ IBinaryComparator[] comparators = new IBinaryComparator[comparatorFactories.length];
+ for (int i = 0; i < comparatorFactories.length; i++) {
+ comparators[i] = comparatorFactories[i].createBinaryComparator();
+ }
+
MultiComparator cmp = new MultiComparator(typeTraits, comparators);
-
+
// try an ordered scan on the bulk-loaded btree
- int btreeFileId = 0; // TODO: this relies on the way FileMappingProvider assigns ids (in sequence starting from 0)
+ int btreeFileId = storageManager.getFileMapProvider().lookupFileId(nc1FileName);
+ storageManager.getBufferCache().openFile(btreeFileId);
BTree btree = btreeRegistryProvider.getBTreeRegistry().get(btreeFileId);
IBTreeCursor scanCursor = new RangeSearchCursor(leafFrameFactory.getFrame());
RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
- BTreeOpContext opCtx = btree.createOpContext(BTreeOp.BTO_SEARCH, leafFrameFactory.getFrame(), interiorFrameFactory.getFrame(), null);
+ BTreeOpContext opCtx = btree.createOpContext(BTreeOp.BTO_SEARCH, leafFrameFactory.getFrame(),
+ interiorFrameFactory.getFrame(), null);
btree.search(scanCursor, nullPred, opCtx);
try {
- while (scanCursor.hasNext()) {
- scanCursor.next();
- ITupleReference frameTuple = scanCursor.getTuple();
+ while (scanCursor.hasNext()) {
+ scanCursor.next();
+ ITupleReference frameTuple = scanCursor.getTuple();
String rec = cmp.printTuple(frameTuple, ordersDesc.getFields());
System.out.println(rec);
- }
+ }
} catch (Exception e) {
- e.printStackTrace();
+ e.printStackTrace();
} finally {
- scanCursor.close();
- }
- }
-
- @Test
- public void btreeSearchTest() throws Exception {
- // relies on the fact that NCs are run from same process
- System.setProperty("NodeControllerDataPath", System.getProperty("java.io.tmpdir") + "/");
-
- JobSpecification spec = new JobSpecification();
-
- // declare fields
- int fieldCount = 3;
- ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ scanCursor.close();
+ }
+ storageManager.getBufferCache().closeFile(btreeFileId);
+ }
+
+ @Test
+ public void btreeSearchTest() throws Exception {
+ JobSpecification spec = new JobSpecification();
+
+ // declare fields
+ int fieldCount = 3;
+ ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
typeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
typeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
typeTraits[2] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
-
+
// declare keys
- int keyFieldCount = 1;
- IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[keyFieldCount];
- comparatorFactories[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
-
- TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
- //SimpleTupleWriterFactory tupleWriterFactory = new SimpleTupleWriterFactory();
- IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
- IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
-
- // construct a multicomparator from the factories (only for printing purposes)
- IBinaryComparator[] comparators = new IBinaryComparator[comparatorFactories.length];
- for(int i = 0; i < comparatorFactories.length; i++) {
- comparators[i] = comparatorFactories[i].createBinaryComparator();
- }
+ int keyFieldCount = 1;
+ IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[keyFieldCount];
+ comparatorFactories[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
+
+ TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+ // SimpleTupleWriterFactory tupleWriterFactory = new
+ // SimpleTupleWriterFactory();
+ IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
+ IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
+
+ // construct a multicomparator from the factories (only for printing
+ // purposes)
+ IBinaryComparator[] comparators = new IBinaryComparator[comparatorFactories.length];
+ for (int i = 0; i < comparatorFactories.length; i++) {
+ comparators[i] = comparatorFactories[i].createBinaryComparator();
+ }
MultiComparator cmp = new MultiComparator(typeTraits, comparators);
-
+
// build tuple containing low and high search key
- ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getKeyFieldCount()*2); // high key and low key
- DataOutput dos = tb.getDataOutput();
-
- tb.reset();
- UTF8StringSerializerDeserializer.INSTANCE.serialize("100", dos); // low key
- tb.addFieldEndOffset();
- UTF8StringSerializerDeserializer.INSTANCE.serialize("200", dos); // high key
- tb.addFieldEndOffset();
-
- ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE };
- RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
-
- ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec, keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
- PartitionConstraint keyProviderPartitionConstraint = new ExplicitPartitionConstraint(new LocationConstraint[] { new AbsoluteLocationConstraint(NC1_ID) });
- keyProviderOp.setPartitionConstraint(keyProviderPartitionConstraint);
-
- IBufferCacheProvider bufferCacheProvider = new BufferCacheProvider();
- IBTreeRegistryProvider btreeRegistryProvider = new BTreeRegistryProvider();
- IFileMappingProviderProvider fileMappingProviderProvider = new FileMappingProviderProvider();
-
- RecordDescriptor recDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE });
-
- String btreeName = "btree.bin";
- String nc1FileName = System.getProperty("java.io.tmpdir") + "/nc1/" + btreeName;
- IFileSplitProvider btreeSplitProvider = new ConstantFileSplitProvider(
- new FileSplit[] { new FileSplit(NC1_ID, new File(nc1FileName)) } );
-
- BTreeSearchOperatorDescriptor btreeSearchOp = new BTreeSearchOperatorDescriptor(spec, recDesc, bufferCacheProvider, btreeRegistryProvider, btreeSplitProvider, fileMappingProviderProvider, interiorFrameFactory, leafFrameFactory, typeTraits, comparatorFactories, true, new int[]{0}, new int[]{1}, true, true);
- //BTreeDiskOrderScanOperatorDescriptor btreeSearchOp = new BTreeDiskOrderScanOperatorDescriptor(spec, splitProvider, recDesc, bufferCacheProvider, btreeRegistryProvider, 0, "btreetest.bin", interiorFrameFactory, leafFrameFactory, cmp);
-
- PartitionConstraint btreePartitionConstraint = new ExplicitPartitionConstraint(new LocationConstraint[] { new AbsoluteLocationConstraint(NC1_ID) });
- btreeSearchOp.setPartitionConstraint(btreePartitionConstraint);
-
- PrinterOperatorDescriptor printer = new PrinterOperatorDescriptor(spec);
- PartitionConstraint printerPartitionConstraint = new ExplicitPartitionConstraint(new LocationConstraint[] { new AbsoluteLocationConstraint(NC1_ID) });
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getKeyFieldCount() * 2); // high
+ // key
+ // and
+ // low
+ // key
+ DataOutput dos = tb.getDataOutput();
+
+ tb.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize("100", dos); // low
+ // key
+ tb.addFieldEndOffset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize("200", dos); // high
+ // key
+ tb.addFieldEndOffset();
+
+ ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE };
+ RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+
+ ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
+ keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+ PartitionConstraint keyProviderPartitionConstraint = new ExplicitPartitionConstraint(
+ new LocationConstraint[] { new AbsoluteLocationConstraint(NC1_ID) });
+ keyProviderOp.setPartitionConstraint(keyProviderPartitionConstraint);
+ IBTreeRegistryProvider btreeRegistryProvider = new BTreeRegistryProvider();
+
+ RecordDescriptor recDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+ UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE });
+
+ String btreeName = "btree.bin";
+ String nc1FileName = System.getProperty("java.io.tmpdir") + "/nc1/" + btreeName;
+ IFileSplitProvider btreeSplitProvider = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+ new File(nc1FileName)) });
+
+ BTreeSearchOperatorDescriptor btreeSearchOp = new BTreeSearchOperatorDescriptor(spec, recDesc, storageManager,
+ btreeRegistryProvider, btreeSplitProvider, interiorFrameFactory, leafFrameFactory, typeTraits,
+ comparatorFactories, true, new int[] { 0 }, new int[] { 1 }, true, true);
+ // BTreeDiskOrderScanOperatorDescriptor btreeSearchOp = new
+ // BTreeDiskOrderScanOperatorDescriptor(spec, splitProvider, recDesc,
+ // bufferCacheProvider, btreeRegistryProvider, 0, "btreetest.bin",
+ // interiorFrameFactory, leafFrameFactory, cmp);
+
+ PartitionConstraint btreePartitionConstraint = new ExplicitPartitionConstraint(
+ new LocationConstraint[] { new AbsoluteLocationConstraint(NC1_ID) });
+ btreeSearchOp.setPartitionConstraint(btreePartitionConstraint);
+
+ PrinterOperatorDescriptor printer = new PrinterOperatorDescriptor(spec);
+ PartitionConstraint printerPartitionConstraint = new ExplicitPartitionConstraint(
+ new LocationConstraint[] { new AbsoluteLocationConstraint(NC1_ID) });
printer.setPartitionConstraint(printerPartitionConstraint);
-
+
spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, btreeSearchOp, 0);
spec.connect(new OneToOneConnectorDescriptor(spec), btreeSearchOp, 0, printer, 0);
-
+
spec.addRoot(printer);
runTest(spec);
}
-
- @Test
- public void insertTest() throws Exception {
- // relies on the fact that NCs are run from same process
- System.setProperty("NodeControllerDataPath", System.getProperty("java.io.tmpdir") + "/");
-
- JobSpecification spec = new JobSpecification();
-
- FileSplit[] ordersSplits = new FileSplit[] {
- new FileSplit(NC1_ID, new File("data/tpch0.001/orders-part1.tbl")) };
+
+ @Test
+ public void insertTest() throws Exception {
+ // relies on the fact that NCs are run from same process
+ System.setProperty("NodeControllerDataPath", System.getProperty("java.io.tmpdir") + "/");
+
+ JobSpecification spec = new JobSpecification();
+
+ FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new File("data/tpch0.001/orders-part1.tbl")) };
IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
@@ -270,214 +283,233 @@
UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
UTF8StringSerializerDeserializer.INSTANCE });
-
+
FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE }, '|'), ordersDesc);
- PartitionConstraint ordersPartitionConstraint = new ExplicitPartitionConstraint(new LocationConstraint[] {
- new AbsoluteLocationConstraint(NC1_ID) });
+ PartitionConstraint ordersPartitionConstraint = new ExplicitPartitionConstraint(
+ new LocationConstraint[] { new AbsoluteLocationConstraint(NC1_ID) });
ordScanner.setPartitionConstraint(ordersPartitionConstraint);
-
- // we will create a primary index and 2 secondary indexes
- // first create comparators for primary index
- int primaryFieldCount = 6;
- ITypeTrait[] primaryTypeTraits = new ITypeTrait[primaryFieldCount];
+
+ // we will create a primary index and 2 secondary indexes
+ // first create comparators for primary index
+ int primaryFieldCount = 6;
+ ITypeTrait[] primaryTypeTraits = new ITypeTrait[primaryFieldCount];
primaryTypeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
primaryTypeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
primaryTypeTraits[2] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
primaryTypeTraits[3] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
primaryTypeTraits[4] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
primaryTypeTraits[5] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
-
- int primaryKeyFieldCount = 1;
- IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
- primaryComparatorFactories[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
-
- TypeAwareTupleWriterFactory primaryTupleWriterFactory = new TypeAwareTupleWriterFactory(primaryTypeTraits);
- //SimpleTupleWriterFactory primaryTupleWriterFactory = new SimpleTupleWriterFactory();
- IBTreeInteriorFrameFactory primaryInteriorFrameFactory = new NSMInteriorFrameFactory(primaryTupleWriterFactory);
- IBTreeLeafFrameFactory primaryLeafFrameFactory = new NSMLeafFrameFactory(primaryTupleWriterFactory);
-
- IBufferCacheProvider bufferCacheProvider = new BufferCacheProvider();
- IBTreeRegistryProvider btreeRegistryProvider = new BTreeRegistryProvider();
-
- // trick to clear pages of old fileids
- BufferCacheProvider tmp = (BufferCacheProvider)bufferCacheProvider;
- tmp.reset();
-
- // construct a multicomparator for the primary index
- IBinaryComparator[] primaryComparators = new IBinaryComparator[primaryComparatorFactories.length];
- for(int i = 0; i < primaryComparatorFactories.length; i++) {
- primaryComparators[i] = primaryComparatorFactories[i].createBinaryComparator();
- }
-
+
+ int primaryKeyFieldCount = 1;
+ IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
+ primaryComparatorFactories[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
+
+ TypeAwareTupleWriterFactory primaryTupleWriterFactory = new TypeAwareTupleWriterFactory(primaryTypeTraits);
+ // SimpleTupleWriterFactory primaryTupleWriterFactory = new
+ // SimpleTupleWriterFactory();
+ IBTreeInteriorFrameFactory primaryInteriorFrameFactory = new NSMInteriorFrameFactory(primaryTupleWriterFactory);
+ IBTreeLeafFrameFactory primaryLeafFrameFactory = new NSMLeafFrameFactory(primaryTupleWriterFactory);
+ IBTreeRegistryProvider btreeRegistryProvider = new BTreeRegistryProvider();
+
+ // construct a multicomparator for the primary index
+ IBinaryComparator[] primaryComparators = new IBinaryComparator[primaryComparatorFactories.length];
+ for (int i = 0; i < primaryComparatorFactories.length; i++) {
+ primaryComparators[i] = primaryComparatorFactories[i].createBinaryComparator();
+ }
+
MultiComparator primaryCmp = new MultiComparator(primaryTypeTraits, primaryComparators);
-
- // now create comparators for secondary indexes
- int secondaryFieldCount = 2;
- ITypeTrait[] secondaryTypeTraits = new ITypeTrait[secondaryFieldCount];
+
+ // now create comparators for secondary indexes
+ int secondaryFieldCount = 2;
+ ITypeTrait[] secondaryTypeTraits = new ITypeTrait[secondaryFieldCount];
secondaryTypeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
- secondaryTypeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
-
- int secondaryKeyFieldCount = 2;
- IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[secondaryKeyFieldCount];
- secondaryComparatorFactories[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
- secondaryComparatorFactories[1] = UTF8StringBinaryComparatorFactory.INSTANCE;
-
- TypeAwareTupleWriterFactory secondaryTupleWriterFactory = new TypeAwareTupleWriterFactory(secondaryTypeTraits);
- //SimpleTupleWriterFactory secondaryTupleWriterFactory = new SimpleTupleWriterFactory();
- IBTreeInteriorFrameFactory secondaryInteriorFrameFactory = new NSMInteriorFrameFactory(secondaryTupleWriterFactory);
- IBTreeLeafFrameFactory secondaryLeafFrameFactory = new NSMLeafFrameFactory(secondaryTupleWriterFactory);
-
- // construct a multicomparator for the secondary indexes
- IBinaryComparator[] secondaryComparators = new IBinaryComparator[secondaryComparatorFactories.length];
- for(int i = 0; i < secondaryComparatorFactories.length; i++) {
- secondaryComparators[i] = secondaryComparatorFactories[i].createBinaryComparator();
- }
-
+ secondaryTypeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+
+ int secondaryKeyFieldCount = 2;
+ IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[secondaryKeyFieldCount];
+ secondaryComparatorFactories[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
+ secondaryComparatorFactories[1] = UTF8StringBinaryComparatorFactory.INSTANCE;
+
+ TypeAwareTupleWriterFactory secondaryTupleWriterFactory = new TypeAwareTupleWriterFactory(secondaryTypeTraits);
+ // SimpleTupleWriterFactory secondaryTupleWriterFactory = new
+ // SimpleTupleWriterFactory();
+ IBTreeInteriorFrameFactory secondaryInteriorFrameFactory = new NSMInteriorFrameFactory(
+ secondaryTupleWriterFactory);
+ IBTreeLeafFrameFactory secondaryLeafFrameFactory = new NSMLeafFrameFactory(secondaryTupleWriterFactory);
+
+ // construct a multicomparator for the secondary indexes
+ IBinaryComparator[] secondaryComparators = new IBinaryComparator[secondaryComparatorFactories.length];
+ for (int i = 0; i < secondaryComparatorFactories.length; i++) {
+ secondaryComparators[i] = secondaryComparatorFactories[i].createBinaryComparator();
+ }
+
MultiComparator secondaryCmp = new MultiComparator(secondaryTypeTraits, secondaryComparators);
-
- // we create and register 3 btrees for in an insert pipeline being fed from a filescan op
- IBufferCache bufferCache = bufferCacheProvider.getBufferCache();
- BTreeRegistry btreeRegistry = btreeRegistryProvider.getBTreeRegistry();
- FileManager fileManager = bufferCacheProvider.getFileManager();
- IFileMappingProviderProvider fileMappingProviderProvider = new FileMappingProviderProvider();
-
+
+ // we create and register 3 btrees for in an insert pipeline being fed
+ // from a filescan op
+ BTreeRegistry btreeRegistry = btreeRegistryProvider.getBTreeRegistry();
+ IBufferCache bufferCache = storageManager.getBufferCache();
+ IFileMapProvider fileMapProvider = storageManager.getFileMapProvider();
+
// primary index
- int fileIdA = fileMappingProviderProvider.getFileMappingProvider().mapNameToFileId("/tmp/btreetestA.ix", true);
- File fA = new File("/tmp/btreetestA.ix");
- RandomAccessFile rafA = new RandomAccessFile(fA, "rw");
- FileInfo fiA = new FileInfo(fileIdA, rafA);
- fileManager.registerFile(fiA);
- BTree btreeA = new BTree(bufferCache, primaryInteriorFrameFactory, primaryLeafFrameFactory, primaryCmp);
- btreeA.create(fileIdA, primaryLeafFrameFactory.getFrame(), new MetaDataFrame());
+ String fileNameA = "/tmp/btreetestA.ix";
+ bufferCache.createFile(fileNameA);
+ int fileIdA = fileMapProvider.lookupFileId(fileNameA);
+ bufferCache.openFile(fileIdA);
+ BTree btreeA = new BTree(bufferCache, primaryInteriorFrameFactory, primaryLeafFrameFactory, primaryCmp);
+ btreeA.create(fileIdA, primaryLeafFrameFactory.getFrame(), new MetaDataFrame());
btreeA.open(fileIdA);
btreeRegistry.register(fileIdA, btreeA);
-
+ bufferCache.closeFile(fileIdA);
+
// first secondary index
- int fileIdB = fileMappingProviderProvider.getFileMappingProvider().mapNameToFileId("/tmp/btreetestB.ix", true);
- File fB = new File("/tmp/btreetestB.ix");
- RandomAccessFile rafB = new RandomAccessFile(fB, "rw");
- FileInfo fiB = new FileInfo(fileIdB, rafB);
- fileManager.registerFile(fiB);
- BTree btreeB = new BTree(bufferCache, secondaryInteriorFrameFactory, secondaryLeafFrameFactory, secondaryCmp);
- btreeB.create(fileIdB, secondaryLeafFrameFactory.getFrame(), new MetaDataFrame());
+ String fileNameB = "/tmp/btreetestB.ix";
+ bufferCache.createFile(fileNameB);
+ int fileIdB = fileMapProvider.lookupFileId(fileNameB);
+ bufferCache.openFile(fileIdB);
+ BTree btreeB = new BTree(bufferCache, secondaryInteriorFrameFactory, secondaryLeafFrameFactory, secondaryCmp);
+ btreeB.create(fileIdB, secondaryLeafFrameFactory.getFrame(), new MetaDataFrame());
btreeB.open(fileIdB);
btreeRegistry.register(fileIdB, btreeB);
-
+ bufferCache.closeFile(fileIdB);
+
// second secondary index
- int fileIdC = fileMappingProviderProvider.getFileMappingProvider().mapNameToFileId("/tmp/btreetestC.ix", true);
- File fC = new File("/tmp/btreetestC.ix");
- RandomAccessFile rafC = new RandomAccessFile(fC, "rw");
- FileInfo fiC = new FileInfo(fileIdC, rafC);
- fileManager.registerFile(fiC);
- BTree btreeC = new BTree(bufferCache, secondaryInteriorFrameFactory, secondaryLeafFrameFactory, secondaryCmp);
- btreeC.create(fileIdC, secondaryLeafFrameFactory.getFrame(), new MetaDataFrame());
+ String fileNameC = "/tmp/btreetestC.ix";
+ bufferCache.createFile(fileNameC);
+ int fileIdC = fileMapProvider.lookupFileId(fileNameC);
+ bufferCache.openFile(fileIdC);
+ BTree btreeC = new BTree(bufferCache, secondaryInteriorFrameFactory, secondaryLeafFrameFactory, secondaryCmp);
+ btreeC.create(fileIdC, secondaryLeafFrameFactory.getFrame(), new MetaDataFrame());
btreeC.open(fileIdC);
btreeRegistry.register(fileIdC, btreeC);
-
+ bufferCache.closeFile(fileIdC);
+
// create insert operators
-
- // primary index
- IFileSplitProvider btreeSplitProviderA = new ConstantFileSplitProvider(
- new FileSplit[] { new FileSplit(NC1_ID, new File("/tmp/btreetestA.ix")) } );
- int[] fieldPermutationA = { 0,1,2,3,4,5 };
- BTreeInsertUpdateDeleteOperatorDescriptor insertOpA = new BTreeInsertUpdateDeleteOperatorDescriptor(spec, ordersDesc, bufferCacheProvider, btreeRegistryProvider, btreeSplitProviderA, fileMappingProviderProvider, primaryInteriorFrameFactory, primaryLeafFrameFactory, primaryTypeTraits, primaryComparatorFactories, fieldPermutationA, BTreeOp.BTO_INSERT);
- PartitionConstraint insertPartitionConstraintA = new ExplicitPartitionConstraint(new LocationConstraint[] { new AbsoluteLocationConstraint(NC1_ID) });
+
+ // primary index
+ IFileSplitProvider btreeSplitProviderA = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+ new File("/tmp/btreetestA.ix")) });
+ int[] fieldPermutationA = { 0, 1, 2, 3, 4, 5 };
+ BTreeInsertUpdateDeleteOperatorDescriptor insertOpA = new BTreeInsertUpdateDeleteOperatorDescriptor(spec,
+ ordersDesc, storageManager, btreeRegistryProvider, btreeSplitProviderA, primaryInteriorFrameFactory,
+ primaryLeafFrameFactory, primaryTypeTraits, primaryComparatorFactories, fieldPermutationA,
+ BTreeOp.BTO_INSERT);
+ PartitionConstraint insertPartitionConstraintA = new ExplicitPartitionConstraint(
+ new LocationConstraint[] { new AbsoluteLocationConstraint(NC1_ID) });
insertOpA.setPartitionConstraint(insertPartitionConstraintA);
-
+
// first secondary index
- IFileSplitProvider btreeSplitProviderB = new ConstantFileSplitProvider(
- new FileSplit[] { new FileSplit(NC1_ID, new File("/tmp/btreetestB.ix")) } );
- int[] fieldPermutationB = { 3, 0 };
- BTreeInsertUpdateDeleteOperatorDescriptor insertOpB = new BTreeInsertUpdateDeleteOperatorDescriptor(spec, ordersDesc, bufferCacheProvider, btreeRegistryProvider, btreeSplitProviderB, fileMappingProviderProvider, secondaryInteriorFrameFactory, secondaryLeafFrameFactory, secondaryTypeTraits, secondaryComparatorFactories, fieldPermutationB, BTreeOp.BTO_INSERT);
- PartitionConstraint insertPartitionConstraintB = new ExplicitPartitionConstraint(new LocationConstraint[] { new AbsoluteLocationConstraint(NC1_ID) });
+ IFileSplitProvider btreeSplitProviderB = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+ new File("/tmp/btreetestB.ix")) });
+ int[] fieldPermutationB = { 3, 0 };
+ BTreeInsertUpdateDeleteOperatorDescriptor insertOpB = new BTreeInsertUpdateDeleteOperatorDescriptor(spec,
+ ordersDesc, storageManager, btreeRegistryProvider, btreeSplitProviderB, secondaryInteriorFrameFactory,
+ secondaryLeafFrameFactory, secondaryTypeTraits, secondaryComparatorFactories, fieldPermutationB,
+ BTreeOp.BTO_INSERT);
+ PartitionConstraint insertPartitionConstraintB = new ExplicitPartitionConstraint(
+ new LocationConstraint[] { new AbsoluteLocationConstraint(NC1_ID) });
insertOpB.setPartitionConstraint(insertPartitionConstraintB);
-
+
// second secondary index
- IFileSplitProvider btreeSplitProviderC = new ConstantFileSplitProvider(
- new FileSplit[] { new FileSplit(NC1_ID, new File("/tmp/btreetestC.ix")) } );
- int[] fieldPermutationC = { 4, 0 };
- BTreeInsertUpdateDeleteOperatorDescriptor insertOpC = new BTreeInsertUpdateDeleteOperatorDescriptor(spec, ordersDesc, bufferCacheProvider, btreeRegistryProvider, btreeSplitProviderC, fileMappingProviderProvider, secondaryInteriorFrameFactory, secondaryLeafFrameFactory, secondaryTypeTraits, secondaryComparatorFactories, fieldPermutationC, BTreeOp.BTO_INSERT);
- PartitionConstraint insertPartitionConstraintC = new ExplicitPartitionConstraint(new LocationConstraint[] { new AbsoluteLocationConstraint(NC1_ID) });
+ IFileSplitProvider btreeSplitProviderC = new ConstantFileSplitProvider(new FileSplit[] { new FileSplit(NC1_ID,
+ new File("/tmp/btreetestC.ix")) });
+ int[] fieldPermutationC = { 4, 0 };
+ BTreeInsertUpdateDeleteOperatorDescriptor insertOpC = new BTreeInsertUpdateDeleteOperatorDescriptor(spec,
+ ordersDesc, storageManager, btreeRegistryProvider, btreeSplitProviderC, secondaryInteriorFrameFactory,
+ secondaryLeafFrameFactory, secondaryTypeTraits, secondaryComparatorFactories, fieldPermutationC,
+ BTreeOp.BTO_INSERT);
+ PartitionConstraint insertPartitionConstraintC = new ExplicitPartitionConstraint(
+ new LocationConstraint[] { new AbsoluteLocationConstraint(NC1_ID) });
insertOpC.setPartitionConstraint(insertPartitionConstraintC);
-
+
NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
- PartitionConstraint nullSinkPartitionConstraint = new ExplicitPartitionConstraint(new LocationConstraint[] { new AbsoluteLocationConstraint(NC1_ID) });
+ PartitionConstraint nullSinkPartitionConstraint = new ExplicitPartitionConstraint(
+ new LocationConstraint[] { new AbsoluteLocationConstraint(NC1_ID) });
nullSink.setPartitionConstraint(nullSinkPartitionConstraint);
-
+
spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, insertOpA, 0);
-
+
spec.connect(new OneToOneConnectorDescriptor(spec), insertOpA, 0, insertOpB, 0);
-
- spec.connect(new OneToOneConnectorDescriptor(spec), insertOpB, 0, insertOpC, 0);
-
- spec.connect(new OneToOneConnectorDescriptor(spec), insertOpC, 0, nullSink, 0);
-
+
+ spec.connect(new OneToOneConnectorDescriptor(spec), insertOpB, 0, insertOpC, 0);
+
+ spec.connect(new OneToOneConnectorDescriptor(spec), insertOpC, 0, nullSink, 0);
+
spec.addRoot(nullSink);
runTest(spec);
-
- // scan primary index
+
+ // scan primary index
System.out.println("PRINTING PRIMARY INDEX");
+ bufferCache.openFile(fileIdA);
IBTreeCursor scanCursorA = new RangeSearchCursor(primaryLeafFrameFactory.getFrame());
RangePredicate nullPredA = new RangePredicate(true, null, null, true, true, null, null);
- BTreeOpContext opCtxA = btreeA.createOpContext(BTreeOp.BTO_SEARCH, primaryLeafFrameFactory.getFrame(), primaryInteriorFrameFactory.getFrame(), null);
+ BTreeOpContext opCtxA = btreeA.createOpContext(BTreeOp.BTO_SEARCH, primaryLeafFrameFactory.getFrame(),
+ primaryInteriorFrameFactory.getFrame(), null);
btreeA.search(scanCursorA, nullPredA, opCtxA);
try {
- while (scanCursorA.hasNext()) {
- scanCursorA.next();
- ITupleReference frameTuple = scanCursorA.getTuple();
+ while (scanCursorA.hasNext()) {
+ scanCursorA.next();
+ ITupleReference frameTuple = scanCursorA.getTuple();
String rec = primaryCmp.printTuple(frameTuple, ordersDesc.getFields());
- System.out.println(rec);
- }
+ System.out.println(rec);
+ }
} catch (Exception e) {
- e.printStackTrace();
+ e.printStackTrace();
} finally {
- scanCursorA.close();
- }
+ scanCursorA.close();
+ }
+ bufferCache.closeFile(fileIdA);
System.out.println();
-
+
// scan first secondary index
System.out.println("PRINTING FIRST SECONDARY INDEX");
+ bufferCache.openFile(fileIdB);
IBTreeCursor scanCursorB = new RangeSearchCursor(secondaryLeafFrameFactory.getFrame());
RangePredicate nullPredB = new RangePredicate(true, null, null, true, true, null, null);
- BTreeOpContext opCtxB = btreeB.createOpContext(BTreeOp.BTO_SEARCH, secondaryLeafFrameFactory.getFrame(), secondaryInteriorFrameFactory.getFrame(), null);
+ BTreeOpContext opCtxB = btreeB.createOpContext(BTreeOp.BTO_SEARCH, secondaryLeafFrameFactory.getFrame(),
+ secondaryInteriorFrameFactory.getFrame(), null);
btreeB.search(scanCursorB, nullPredB, opCtxB);
try {
- while (scanCursorB.hasNext()) {
- scanCursorB.next();
- ITupleReference frameTuple = scanCursorB.getTuple();
- String rec = secondaryCmp.printTuple(frameTuple, ordersDesc.getFields());
- System.out.println(rec);
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- scanCursorB.close();
- }
- System.out.println();
-
- // scan second secondary index
- System.out.println("PRINTING SECOND SECONDARY INDEX");
- IBTreeCursor scanCursorC = new RangeSearchCursor(secondaryLeafFrameFactory.getFrame());
- RangePredicate nullPredC = new RangePredicate(true, null, null, true, true, null, null);
- BTreeOpContext opCtxC = btreeC.createOpContext(BTreeOp.BTO_SEARCH, secondaryLeafFrameFactory.getFrame(), secondaryInteriorFrameFactory.getFrame(), null);
- btreeC.search(scanCursorC, nullPredC, opCtxC);
- try {
- while (scanCursorC.hasNext()) {
- scanCursorC.next();
- ITupleReference frameTuple = scanCursorC.getTuple();
+ while (scanCursorB.hasNext()) {
+ scanCursorB.next();
+ ITupleReference frameTuple = scanCursorB.getTuple();
String rec = secondaryCmp.printTuple(frameTuple, ordersDesc.getFields());
System.out.println(rec);
- }
+ }
} catch (Exception e) {
- e.printStackTrace();
+ e.printStackTrace();
} finally {
- scanCursorC.close();
- }
+ scanCursorB.close();
+ }
+ bufferCache.closeFile(fileIdB);
System.out.println();
- }
+
+ // scan second secondary index
+ System.out.println("PRINTING SECOND SECONDARY INDEX");
+ bufferCache.openFile(fileIdC);
+ IBTreeCursor scanCursorC = new RangeSearchCursor(secondaryLeafFrameFactory.getFrame());
+ RangePredicate nullPredC = new RangePredicate(true, null, null, true, true, null, null);
+ BTreeOpContext opCtxC = btreeC.createOpContext(BTreeOp.BTO_SEARCH, secondaryLeafFrameFactory.getFrame(),
+ secondaryInteriorFrameFactory.getFrame(), null);
+ btreeC.search(scanCursorC, nullPredC, opCtxC);
+ try {
+ while (scanCursorC.hasNext()) {
+ scanCursorC.next();
+ ITupleReference frameTuple = scanCursorC.getTuple();
+ String rec = secondaryCmp.printTuple(frameTuple, ordersDesc.getFields());
+ System.out.println(rec);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ scanCursorC.close();
+ }
+ bufferCache.closeFile(fileIdC);
+ System.out.println();
+ }
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/DummySMI.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/DummySMI.java
new file mode 100644
index 0000000..9baafcc
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/DummySMI.java
@@ -0,0 +1,109 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.api;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+import edu.uci.ics.hyracks.storage.common.buffercache.BufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ClockPageReplacementStrategy;
+import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IPageReplacementStrategy;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapManager;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+
+public class DummySMI implements IStorageManagerInterface {
+
+ private static final long serialVersionUID = 1L;
+
+ private transient IBufferCache bufferCache = null;
+ private transient IFileMapManager fmManager;
+ private int PAGE_SIZE = 8192;
+ private int NUM_PAGES = 40;
+
+ public DummySMI() {
+ }
+
+ public DummySMI(int pageSize, int numPages) {
+ PAGE_SIZE = pageSize;
+ NUM_PAGES = numPages;
+ }
+
+ @Override
+ public synchronized IBufferCache getBufferCache() {
+ if (bufferCache == null) {
+ ICacheMemoryAllocator allocator = new HeapBufferAllocator();
+ IPageReplacementStrategy prs = new ClockPageReplacementStrategy();
+ fmManager = new IFileMapManager() {
+ private Map<Integer, String> id2nameMap = new HashMap<Integer, String>();
+ private Map<String, Integer> name2IdMap = new HashMap<String, Integer>();
+ private int idCounter = 0;
+
+ @Override
+ public String lookupFileName(int fileId) throws HyracksDataException {
+ String fName = id2nameMap.get(fileId);
+ if (fName == null) {
+ throw new HyracksDataException("No mapping found for id: " + fileId);
+ }
+ return fName;
+ }
+
+ @Override
+ public int lookupFileId(String fileName) throws HyracksDataException {
+ Integer fileId = name2IdMap.get(fileName);
+ if (fileId == null) {
+ throw new HyracksDataException("No mapping found for name: " + fileName);
+ }
+ return fileId;
+ }
+
+ @Override
+ public boolean isMapped(String fileName) {
+ return name2IdMap.containsKey(fileName);
+ }
+
+ @Override
+ public boolean isMapped(int fileId) {
+ return id2nameMap.containsKey(fileId);
+ }
+
+ @Override
+ public void unregisterFile(int fileId) throws HyracksDataException {
+ String fileName = id2nameMap.remove(fileId);
+ name2IdMap.remove(fileName);
+ }
+
+ @Override
+ public void registerFile(String fileName) throws HyracksDataException {
+ Integer fileId = idCounter++;
+ id2nameMap.put(fileId, fileName);
+ name2IdMap.put(fileName, fileId);
+ }
+ };
+ bufferCache = new BufferCache(allocator, prs, fmManager, PAGE_SIZE, NUM_PAGES);
+ }
+
+ return bufferCache;
+ }
+
+ public IFileMapProvider getFileMapProvider() {
+ return fmManager;
+ }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeCursor.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeCursor.java
index 47e2fb5..8934921 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeCursor.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeCursor.java
@@ -20,13 +20,21 @@
import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
public interface IBTreeCursor {
- public void reset();
- public boolean hasNext() throws Exception;
- public void next() throws Exception;
- public void open(ICachedPage page, ISearchPredicate searchPred) throws Exception;
- public ICachedPage getPage();
- public void close() throws Exception;
- public void setBufferCache(IBufferCache bufferCache);
- public void setFileId(int fileId);
- public ITupleReference getTuple();
+ public void reset();
+
+ public boolean hasNext() throws Exception;
+
+ public void next() throws Exception;
+
+ public void open(ICachedPage page, ISearchPredicate searchPred) throws Exception;
+
+ public ICachedPage getPage();
+
+ public void close() throws Exception;
+
+ public void setBufferCache(IBufferCache bufferCache);
+
+ public void setFileId(int fileId);
+
+ public ITupleReference getTuple();
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeFrame.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeFrame.java
index 0ba6e1d..0e15903 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeFrame.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeFrame.java
@@ -26,62 +26,77 @@
import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
public interface IBTreeFrame {
- public void setPage(ICachedPage page);
- public ICachedPage getPage();
- public ByteBuffer getBuffer();
-
- public void insert(ITupleReference tuple, MultiComparator cmp) throws Exception;
- public void update(int rid, ITupleReference tuple) throws Exception;
- public void delete(ITupleReference tuple, MultiComparator cmp, boolean exactDelete) throws Exception;
-
- public void compact(MultiComparator cmp);
- public boolean compress(MultiComparator cmp) throws Exception;
-
- public void initBuffer(byte level);
-
- public int getTupleCount();
-
- // assumption: page must be write-latched at this point
- public SpaceStatus hasSpaceInsert(ITupleReference tuple, MultiComparator cmp);
- public SpaceStatus hasSpaceUpdate(int rid, ITupleReference tuple, MultiComparator cmp);
-
- public int getTupleOffset(int slotNum);
-
- public int getTotalFreeSpace();
-
- public void setPageLsn(int pageLsn);
- public int getPageLsn();
-
- // for debugging
- public void printHeader();
- public String printKeys(MultiComparator cmp, ISerializerDeserializer[] fields) throws HyracksDataException;
-
-
- // TODO; what if tuples more than half-page size?
- public int split(IBTreeFrame rightFrame, ITupleReference tuple, MultiComparator cmp, SplitKey splitKey) throws Exception;
-
- // TODO: check if we do something nicer than returning object
- public ISlotManager getSlotManager();
-
- // ATTENTION: in b-tree operations it may not always be possible to determine whether an ICachedPage is a leaf or interior node
- // a compatible interior and leaf implementation MUST return identical values when given the same ByteBuffer for the functions below
- public boolean isLeaf();
- public byte getLevel();
- public void setLevel(byte level);
- public boolean getSmFlag(); // structure modification flag
- public void setSmFlag(boolean smFlag);
-
- public void insertSorted(ITupleReference tuple, MultiComparator cmp) throws Exception;
-
- public int getSlotSize();
-
- // TODO: should be removed after new tuple format
- public void setPageTupleFieldCount(int fieldCount);
-
- // for debugging
- public int getFreeSpaceOff();
- public void setFreeSpaceOff(int freeSpace);
-
- public IBTreeTupleWriter getTupleWriter();
-
+ public void setPage(ICachedPage page);
+
+ public ICachedPage getPage();
+
+ public ByteBuffer getBuffer();
+
+ public void insert(ITupleReference tuple, MultiComparator cmp) throws Exception;
+
+ public void update(int rid, ITupleReference tuple) throws Exception;
+
+ public void delete(ITupleReference tuple, MultiComparator cmp, boolean exactDelete) throws Exception;
+
+ public void compact(MultiComparator cmp);
+
+ public boolean compress(MultiComparator cmp) throws HyracksDataException;
+
+ public void initBuffer(byte level);
+
+ public int getTupleCount();
+
+ // assumption: page must be write-latched at this point
+ public SpaceStatus hasSpaceInsert(ITupleReference tuple, MultiComparator cmp);
+
+ public SpaceStatus hasSpaceUpdate(int rid, ITupleReference tuple, MultiComparator cmp);
+
+ public int getTupleOffset(int slotNum);
+
+ public int getTotalFreeSpace();
+
+ public void setPageLsn(int pageLsn);
+
+ public int getPageLsn();
+
+ // for debugging
+ public void printHeader();
+
+ public String printKeys(MultiComparator cmp, ISerializerDeserializer[] fields) throws HyracksDataException;
+
+ // TODO; what if tuples more than half-page size?
+ public int split(IBTreeFrame rightFrame, ITupleReference tuple, MultiComparator cmp, SplitKey splitKey)
+ throws Exception;
+
+ // TODO: check if we do something nicer than returning object
+ public ISlotManager getSlotManager();
+
+ // ATTENTION: in b-tree operations it may not always be possible to
+ // determine whether an ICachedPage is a leaf or interior node
+ // a compatible interior and leaf implementation MUST return identical
+ // values when given the same ByteBuffer for the functions below
+ public boolean isLeaf();
+
+ public byte getLevel();
+
+ public void setLevel(byte level);
+
+ public boolean getSmFlag(); // structure modification flag
+
+ public void setSmFlag(boolean smFlag);
+
+ public void insertSorted(ITupleReference tuple, MultiComparator cmp) throws HyracksDataException;
+
+ public int getSlotSize();
+
+ // TODO: should be removed after new tuple format
+ public void setPageTupleFieldCount(int fieldCount);
+
+ // for debugging
+ public int getFreeSpaceOff();
+
+ public void setFreeSpaceOff(int freeSpace);
+
+ public IBTreeTupleWriter getTupleWriter();
+
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeInteriorFrame.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeInteriorFrame.java
index ee6e94b..675bee3 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeInteriorFrame.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeInteriorFrame.java
@@ -19,9 +19,13 @@
import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
public interface IBTreeInteriorFrame extends IBTreeFrame {
- public int getChildPageId(RangePredicate pred, MultiComparator srcCmp);
- public int getLeftmostChildPageId(MultiComparator cmp);
- public int getRightmostChildPageId(MultiComparator cmp);
- public void setRightmostChildPageId(int pageId);
- public void deleteGreatest(MultiComparator cmp);
+ public int getChildPageId(RangePredicate pred, MultiComparator srcCmp);
+
+ public int getLeftmostChildPageId(MultiComparator cmp);
+
+ public int getRightmostChildPageId(MultiComparator cmp);
+
+ public void setRightmostChildPageId(int pageId);
+
+ public void deleteGreatest(MultiComparator cmp);
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeInteriorFrameFactory.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeInteriorFrameFactory.java
index 8383fed..bddd636 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeInteriorFrameFactory.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeInteriorFrameFactory.java
@@ -17,6 +17,6 @@
import java.io.Serializable;
-public interface IBTreeInteriorFrameFactory extends Serializable {
- public IBTreeInteriorFrame getFrame();
+public interface IBTreeInteriorFrameFactory extends Serializable {
+ public IBTreeInteriorFrame getFrame();
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeLeafFrame.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeLeafFrame.java
index 5fb549c..9eb449c 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeLeafFrame.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeLeafFrame.java
@@ -20,14 +20,17 @@
import edu.uci.ics.hyracks.storage.am.btree.impls.FindTupleNoExactMatchPolicy;
import edu.uci.ics.hyracks.storage.am.btree.impls.MultiComparator;
-public interface IBTreeLeafFrame extends IBTreeFrame {
- public void setNextLeaf(int nextPage);
- public int getNextLeaf();
-
- public void setPrevLeaf(int prevPage);
- public int getPrevLeaf();
-
- public IBTreeTupleReference createTupleReference();
-
- public int findTupleIndex(ITupleReference searchKey, IBTreeTupleReference pageTuple, MultiComparator cmp, FindTupleMode ftm, FindTupleNoExactMatchPolicy ftp);
+public interface IBTreeLeafFrame extends IBTreeFrame {
+ public void setNextLeaf(int nextPage);
+
+ public int getNextLeaf();
+
+ public void setPrevLeaf(int prevPage);
+
+ public int getPrevLeaf();
+
+ public IBTreeTupleReference createTupleReference();
+
+ public int findTupleIndex(ITupleReference searchKey, IBTreeTupleReference pageTuple, MultiComparator cmp,
+ FindTupleMode ftm, FindTupleNoExactMatchPolicy ftp);
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeLeafFrameFactory.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeLeafFrameFactory.java
index 0855a72..50733de 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeLeafFrameFactory.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeLeafFrameFactory.java
@@ -17,6 +17,6 @@
import java.io.Serializable;
-public interface IBTreeLeafFrameFactory extends Serializable {
- public IBTreeLeafFrame getFrame();
+public interface IBTreeLeafFrameFactory extends Serializable {
+ public IBTreeLeafFrame getFrame();
}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeMetaDataFrame.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeMetaDataFrame.java
index 975ee10..58739e2 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeMetaDataFrame.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeMetaDataFrame.java
@@ -19,20 +19,26 @@
public interface IBTreeMetaDataFrame {
public void initBuffer(int level);
-
+
public void setPage(ICachedPage page);
+
public ICachedPage getPage();
-
+
public byte getLevel();
+
public void setLevel(byte level);
-
+
public int getNextPage();
+
public void setNextPage(int nextPage);
-
+
public int getMaxPage();
+
public void setMaxPage(int maxPage);
-
+
public int getFreePage();
+
public boolean hasSpace();
+
public void addFreePage(int freePage);
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeMetaDataFrameFactory.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeMetaDataFrameFactory.java
index bc9dea5..2912cf9 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeMetaDataFrameFactory.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeMetaDataFrameFactory.java
@@ -15,6 +15,6 @@
package edu.uci.ics.hyracks.storage.am.btree.api;
-public interface IBTreeMetaDataFrameFactory {
+public interface IBTreeMetaDataFrameFactory {
public IBTreeMetaDataFrame getFrame();
}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeTupleReference.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeTupleReference.java
index bbeb6a6..38d6217 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeTupleReference.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeTupleReference.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.btree.api;
import java.nio.ByteBuffer;
@@ -5,8 +20,11 @@
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
public interface IBTreeTupleReference extends ITupleReference {
- public void setFieldCount(int fieldCount);
- public void setFieldCount(int fieldStartIndex, int fieldCount);
- public void resetByOffset(ByteBuffer buf, int tupleStartOffset);
- public void resetByTupleIndex(IBTreeFrame frame, int tupleIndex);
+ public void setFieldCount(int fieldCount);
+
+ public void setFieldCount(int fieldStartIndex, int fieldCount);
+
+ public void resetByOffset(ByteBuffer buf, int tupleStartOffset);
+
+ public void resetByTupleIndex(IBTreeFrame frame, int tupleIndex);
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeTupleWriter.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeTupleWriter.java
index 98c29f3..d8cb9af 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeTupleWriter.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeTupleWriter.java
@@ -1,17 +1,37 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.btree.api;
import java.nio.ByteBuffer;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-public interface IBTreeTupleWriter {
- public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf, int targetOff);
- public int bytesRequired(ITupleReference tuple);
-
- public int writeTupleFields(ITupleReference tuple, int startField, int numFields, ByteBuffer targetBuf, int targetOff);
- public int bytesRequired(ITupleReference tuple, int startField, int numFields);
-
- // return a tuplereference instance that can read the tuple written by this writer
- // the main idea is that the format of the written tuple may not be the same as the format written by this writer
- public IBTreeTupleReference createTupleReference();
+public interface IBTreeTupleWriter {
+ public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf, int targetOff);
+
+ public int bytesRequired(ITupleReference tuple);
+
+ public int writeTupleFields(ITupleReference tuple, int startField, int numFields, ByteBuffer targetBuf,
+ int targetOff);
+
+ public int bytesRequired(ITupleReference tuple, int startField, int numFields);
+
+ // return a tuplereference instance that can read the tuple written by this
+ // writer
+ // the main idea is that the format of the written tuple may not be the same
+ // as the format written by this writer
+ public IBTreeTupleReference createTupleReference();
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeTupleWriterFactory.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeTupleWriterFactory.java
index fb08a79..645c430 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeTupleWriterFactory.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeTupleWriterFactory.java
@@ -1,7 +1,22 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.btree.api;
import java.io.Serializable;
public interface IBTreeTupleWriterFactory extends Serializable {
- public IBTreeTupleWriter createTupleWriter();
+ public IBTreeTupleWriter createTupleWriter();
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IFrameCompressor.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IFrameCompressor.java
index dd981c1..51e67ca 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IFrameCompressor.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IFrameCompressor.java
@@ -19,5 +19,5 @@
import edu.uci.ics.hyracks.storage.am.btree.impls.MultiComparator;
public interface IFrameCompressor {
- public boolean compress(FieldPrefixNSMLeafFrame frame, MultiComparator cmp) throws Exception;
+ public boolean compress(FieldPrefixNSMLeafFrame frame, MultiComparator cmp) throws Exception;
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IPrefixSlotManager.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IPrefixSlotManager.java
index d8459a4..f45406f 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IPrefixSlotManager.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IPrefixSlotManager.java
@@ -42,31 +42,39 @@
// all prefixes are recomputed during a reorg or compaction
public interface IPrefixSlotManager {
- public void setFrame(FieldPrefixNSMLeafFrame frame);
-
- public int decodeFirstSlotField(int slot);
- public int decodeSecondSlotField(int slot);
- public int encodeSlotFields(int firstField, int secondField);
-
- public int findSlot(ITupleReference searchKey, IBTreeTupleReference frameTuple, IBTreeTupleReference framePrefixTuple, MultiComparator multiCmp, FindTupleMode mode, FindTupleNoExactMatchPolicy matchPolicy);
- public int insertSlot(int slot, int tupleOff);
-
- // returns prefix slot number, returns TUPLE_UNCOMPRESSED if none found
- public int findPrefix(ITupleReference tuple, IBTreeTupleReference framePrefixTuple, MultiComparator multiCmp);
-
- public int getTupleSlotStartOff();
- public int getTupleSlotEndOff();
-
- public int getPrefixSlotStartOff();
- public int getPrefixSlotEndOff();
-
- public int getTupleSlotOff(int tupleIndex);
- public int getPrefixSlotOff(int tupleIndex);
-
- public int getSlotSize();
-
- public void setSlot(int offset, int value);
-
- // functions for testing
- public void setPrefixSlot(int tupleIndex, int slot);
+ public void setFrame(FieldPrefixNSMLeafFrame frame);
+
+ public int decodeFirstSlotField(int slot);
+
+ public int decodeSecondSlotField(int slot);
+
+ public int encodeSlotFields(int firstField, int secondField);
+
+ public int findSlot(ITupleReference searchKey, IBTreeTupleReference frameTuple,
+ IBTreeTupleReference framePrefixTuple, MultiComparator multiCmp, FindTupleMode mode,
+ FindTupleNoExactMatchPolicy matchPolicy);
+
+ public int insertSlot(int slot, int tupleOff);
+
+ // returns prefix slot number, returns TUPLE_UNCOMPRESSED if none found
+ public int findPrefix(ITupleReference tuple, IBTreeTupleReference framePrefixTuple, MultiComparator multiCmp);
+
+ public int getTupleSlotStartOff();
+
+ public int getTupleSlotEndOff();
+
+ public int getPrefixSlotStartOff();
+
+ public int getPrefixSlotEndOff();
+
+ public int getTupleSlotOff(int tupleIndex);
+
+ public int getPrefixSlotOff(int tupleIndex);
+
+ public int getSlotSize();
+
+ public void setSlot(int offset, int value);
+
+ // functions for testing
+ public void setPrefixSlot(int tupleIndex, int slot);
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/ISearchPredicate.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/ISearchPredicate.java
index 334f2f3..255c370 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/ISearchPredicate.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/ISearchPredicate.java
@@ -18,5 +18,5 @@
import java.io.Serializable;
public interface ISearchPredicate extends Serializable {
- public boolean isForward();
+ public boolean isForward();
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/ISlotManager.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/ISlotManager.java
index cbb2d86..c1d4e0a 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/ISlotManager.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/ISlotManager.java
@@ -21,18 +21,22 @@
import edu.uci.ics.hyracks.storage.am.btree.impls.MultiComparator;
public interface ISlotManager {
- public void setFrame(IBTreeFrame frame);
-
- public int findTupleIndex(ITupleReference searchKey, IBTreeTupleReference frameTuple, MultiComparator multiCmp, FindTupleMode mode, FindTupleNoExactMatchPolicy matchPolicy);
- public int insertSlot(int tupleIndex, int tupleOff);
-
- public int getSlotStartOff();
- public int getSlotEndOff();
-
- public int getTupleOff(int slotOff);
- public void setSlot(int slotOff, int value);
-
- public int getSlotOff(int tupleIndex);
-
- public int getSlotSize();
+ public void setFrame(IBTreeFrame frame);
+
+ public int findTupleIndex(ITupleReference searchKey, IBTreeTupleReference frameTuple, MultiComparator multiCmp,
+ FindTupleMode mode, FindTupleNoExactMatchPolicy matchPolicy);
+
+ public int insertSlot(int tupleIndex, int tupleOff);
+
+ public int getSlotStartOff();
+
+ public int getSlotEndOff();
+
+ public int getTupleOff(int slotOff);
+
+ public void setSlot(int slotOff, int value);
+
+ public int getSlotOff(int tupleIndex);
+
+ public int getSlotSize();
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/compressors/FieldPrefixCompressor.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/compressors/FieldPrefixCompressor.java
index ecc177d..eb6c0ab 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/compressors/FieldPrefixCompressor.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/compressors/FieldPrefixCompressor.java
@@ -31,410 +31,481 @@
import edu.uci.ics.hyracks.storage.am.btree.tuples.TypeAwareTupleWriter;
public class FieldPrefixCompressor implements IFrameCompressor {
-
- // minimum ratio of uncompressed tuples to total tuple to consider re-compression
- private float ratioThreshold;
-
- // minimum number of tuple matching field prefixes to consider compressing them
- private int occurrenceThreshold;
-
- private ITypeTrait[] typeTraits;
-
- public FieldPrefixCompressor(ITypeTrait[] typeTraits, float ratioThreshold, int occurrenceThreshold) {
- this.typeTraits = typeTraits;
- this.ratioThreshold = ratioThreshold;
- this.occurrenceThreshold = occurrenceThreshold;
- }
-
- @Override
- public boolean compress(FieldPrefixNSMLeafFrame frame, MultiComparator cmp) throws Exception {
- int tupleCount = frame.getTupleCount();
- if(tupleCount <= 0) {
+
+ // minimum ratio of uncompressed tuples to total tuple to consider
+ // re-compression
+ private float ratioThreshold;
+
+ // minimum number of tuple matching field prefixes to consider compressing
+ // them
+ private int occurrenceThreshold;
+
+ private ITypeTrait[] typeTraits;
+
+ public FieldPrefixCompressor(ITypeTrait[] typeTraits, float ratioThreshold, int occurrenceThreshold) {
+ this.typeTraits = typeTraits;
+ this.ratioThreshold = ratioThreshold;
+ this.occurrenceThreshold = occurrenceThreshold;
+ }
+
+ @Override
+ public boolean compress(FieldPrefixNSMLeafFrame frame, MultiComparator cmp) throws Exception {
+ int tupleCount = frame.getTupleCount();
+ if (tupleCount <= 0) {
frame.setPrefixTupleCount(0);
frame.setFreeSpaceOff(frame.getOrigFreeSpaceOff());
- frame.setTotalFreeSpace(frame.getOrigTotalFreeSpace());
+ frame.setTotalFreeSpace(frame.getOrigTotalFreeSpace());
return false;
}
-
- int uncompressedTupleCount = frame.getUncompressedTupleCount();
- float ratio = (float)uncompressedTupleCount / (float)tupleCount;
- if(ratio < ratioThreshold) return false;
-
+
+ int uncompressedTupleCount = frame.getUncompressedTupleCount();
+ float ratio = (float) uncompressedTupleCount / (float) tupleCount;
+ if (ratio < ratioThreshold)
+ return false;
+
IBinaryComparator[] cmps = cmp.getComparators();
int fieldCount = cmp.getKeyFieldCount();
-
+
ByteBuffer buf = frame.getBuffer();
byte[] pageArray = buf.array();
IPrefixSlotManager slotManager = frame.slotManager;
-
+
// perform analysis pass
ArrayList<KeyPartition> keyPartitions = getKeyPartitions(frame, cmp, occurrenceThreshold);
- if(keyPartitions.size() == 0) return false;
-
- // for each keyPartition, determine the best prefix length for compression, and count how many prefix tuple we would need in total
+ if (keyPartitions.size() == 0)
+ return false;
+
+ // for each keyPartition, determine the best prefix length for
+ // compression, and count how many prefix tuple we would need in total
int totalSlotsNeeded = 0;
int totalPrefixBytes = 0;
- for(KeyPartition kp : keyPartitions) {
-
- for(int j = 0; j < kp.pmi.length; j++) {
- int benefitMinusCost = kp.pmi[j].spaceBenefit - kp.pmi[j].spaceCost;
- if(benefitMinusCost > kp.maxBenefitMinusCost) {
+ for (KeyPartition kp : keyPartitions) {
+
+ for (int j = 0; j < kp.pmi.length; j++) {
+ int benefitMinusCost = kp.pmi[j].spaceBenefit - kp.pmi[j].spaceCost;
+ if (benefitMinusCost > kp.maxBenefitMinusCost) {
kp.maxBenefitMinusCost = benefitMinusCost;
kp.maxPmiIndex = j;
}
}
-
- // ignore keyPartitions with no benefit and don't count bytes and slots needed
- if(kp.maxBenefitMinusCost <= 0) continue;
-
+
+ // ignore keyPartitions with no benefit and don't count bytes and
+ // slots needed
+ if (kp.maxBenefitMinusCost <= 0)
+ continue;
+
totalPrefixBytes += kp.pmi[kp.maxPmiIndex].prefixBytes;
totalSlotsNeeded += kp.pmi[kp.maxPmiIndex].prefixSlotsNeeded;
}
-
- //System.out.println("TOTAL SLOTS NEEDED: " + totalSlotsNeeded);
-
+
+ // System.out.println("TOTAL SLOTS NEEDED: " + totalSlotsNeeded);
+
// we use a greedy heuristic to solve this "knapsack"-like problem
- // (every keyPartition has a space savings and a number of slots required, but we the number of slots are constrained by MAX_PREFIX_SLOTS)
- // we sort the keyPartitions by maxBenefitMinusCost / prefixSlotsNeeded and later choose the top MAX_PREFIX_SLOTS
+ // (every keyPartition has a space savings and a number of slots
+ // required, but we the number of slots are constrained by
+ // MAX_PREFIX_SLOTS)
+ // we sort the keyPartitions by maxBenefitMinusCost / prefixSlotsNeeded
+ // and later choose the top MAX_PREFIX_SLOTS
int[] newPrefixSlots;
- if(totalSlotsNeeded > FieldPrefixSlotManager.MAX_PREFIX_SLOTS) {
- // order keyPartitions by the heuristic function
+ if (totalSlotsNeeded > FieldPrefixSlotManager.MAX_PREFIX_SLOTS) {
+ // order keyPartitions by the heuristic function
SortByHeuristic heuristicComparator = new SortByHeuristic();
Collections.sort(keyPartitions, heuristicComparator);
int slotsUsed = 0;
int numberKeyPartitions = -1;
- for(int i = 0; i < keyPartitions.size(); i++) {
- KeyPartition kp = keyPartitions.get(i);
+ for (int i = 0; i < keyPartitions.size(); i++) {
+ KeyPartition kp = keyPartitions.get(i);
slotsUsed += kp.pmi[kp.maxPmiIndex].prefixSlotsNeeded;
- if(slotsUsed > FieldPrefixSlotManager.MAX_PREFIX_SLOTS) {
+ if (slotsUsed > FieldPrefixSlotManager.MAX_PREFIX_SLOTS) {
numberKeyPartitions = i + 1;
slotsUsed -= kp.pmi[kp.maxPmiIndex].prefixSlotsNeeded;
break;
- }
+ }
}
newPrefixSlots = new int[slotsUsed];
-
+
// remove irrelevant keyPartitions and adjust total prefix bytes
- while(keyPartitions.size() >= numberKeyPartitions) {
- int lastIndex = keyPartitions.size() - 1;
- KeyPartition kp = keyPartitions.get(lastIndex);
- if(kp.maxBenefitMinusCost > 0) totalPrefixBytes -= kp.pmi[kp.maxPmiIndex].prefixBytes;
- keyPartitions.remove(lastIndex);
+ while (keyPartitions.size() >= numberKeyPartitions) {
+ int lastIndex = keyPartitions.size() - 1;
+ KeyPartition kp = keyPartitions.get(lastIndex);
+ if (kp.maxBenefitMinusCost > 0)
+ totalPrefixBytes -= kp.pmi[kp.maxPmiIndex].prefixBytes;
+ keyPartitions.remove(lastIndex);
}
-
- // re-order keyPartitions by prefix (corresponding to original order)
+
+ // re-order keyPartitions by prefix (corresponding to original
+ // order)
SortByOriginalRank originalRankComparator = new SortByOriginalRank();
- Collections.sort(keyPartitions, originalRankComparator);
- }
- else {
+ Collections.sort(keyPartitions, originalRankComparator);
+ } else {
newPrefixSlots = new int[totalSlotsNeeded];
}
-
+
int[] newTupleSlots = new int[tupleCount];
-
+
// WARNING: our hope is that compression is infrequent
- // here we allocate a big chunk of memory to temporary hold the new, re-compressed tuple
- // in general it is very hard to avoid this step
+ // here we allocate a big chunk of memory to temporary hold the new,
+ // re-compressed tuple
+ // in general it is very hard to avoid this step
int prefixFreeSpace = frame.getOrigFreeSpaceOff();
int tupleFreeSpace = prefixFreeSpace + totalPrefixBytes;
- byte[] buffer = new byte[buf.capacity()];
+ byte[] buffer = new byte[buf.capacity()];
ByteBuffer byteBuffer = ByteBuffer.wrap(buffer);
-
+
// perform compression, and reorg
- // we assume that the keyPartitions are sorted by the prefixes (i.e., in the logical target order)
+ // we assume that the keyPartitions are sorted by the prefixes (i.e., in
+ // the logical target order)
int kpIndex = 0;
int tupleIndex = 0;
int prefixTupleIndex = 0;
uncompressedTupleCount = 0;
-
- TypeAwareTupleWriter tupleWriter = new TypeAwareTupleWriter(typeTraits);
-
- FieldPrefixTupleReference tupleToWrite = new FieldPrefixTupleReference(tupleWriter.createTupleReference());
- tupleToWrite.setFieldCount(fieldCount);
-
- while(tupleIndex < tupleCount) {
- if(kpIndex < keyPartitions.size()) {
-
- // beginning of keyPartition found, compress entire keyPartition
- if(tupleIndex == keyPartitions.get(kpIndex).firstTupleIndex) {
-
- // number of fields we decided to use for compression of this keyPartition
- int numFieldsToCompress = keyPartitions.get(kpIndex).maxPmiIndex + 1;
- int segmentStart = keyPartitions.get(kpIndex).firstTupleIndex;
- int tuplesInSegment = 1;
-
- //System.out.println("PROCESSING KEYPARTITION: " + kpIndex + " RANGE: " + keyPartitions.get(kpIndex).firstRecSlotNum + " " + keyPartitions.get(kpIndex).lastRecSlotNum + " FIELDSTOCOMPRESS: " + numFieldsToCompress);
-
- FieldPrefixTupleReference prevTuple = new FieldPrefixTupleReference(tupleWriter.createTupleReference());
- prevTuple.setFieldCount(fieldCount);
-
- FieldPrefixTupleReference tuple = new FieldPrefixTupleReference(tupleWriter.createTupleReference());
- tuple.setFieldCount(fieldCount);
-
- for(int i = tupleIndex + 1; i <= keyPartitions.get(kpIndex).lastTupleIndex; i++) {
- prevTuple.resetByTupleIndex(frame, i - 1);
- tuple.resetByTupleIndex(frame, i);
-
- // check if tuples match in numFieldsToCompress of their first fields
- int prefixFieldsMatch = 0;
- for(int j = 0; j < numFieldsToCompress; j++) {
- if(cmps[j].compare(pageArray, prevTuple.getFieldStart(j), prevTuple.getFieldLength(j), pageArray, tuple.getFieldStart(j), tuple.getFieldLength(j)) == 0) prefixFieldsMatch++;
- else break;
- }
-
- // the two tuples must match in exactly the number of fields we decided to compress for this keyPartition
- int processSegments = 0;
- if(prefixFieldsMatch == numFieldsToCompress) tuplesInSegment++;
- else processSegments++;
- if(i == keyPartitions.get(kpIndex).lastTupleIndex) processSegments++;
-
- for(int r = 0; r < processSegments; r++) {
- // compress current segment and then start new segment
- if(tuplesInSegment < occurrenceThreshold || numFieldsToCompress <= 0) {
- // segment does not have at least occurrenceThreshold tuples, so write tuples uncompressed
- for(int j = 0; j < tuplesInSegment; j++) {
- int slotNum = segmentStart + j;
- tupleToWrite.resetByTupleIndex(frame, slotNum);
- newTupleSlots[tupleCount - 1 - slotNum] = slotManager.encodeSlotFields(FieldPrefixSlotManager.TUPLE_UNCOMPRESSED, tupleFreeSpace);
- tupleFreeSpace += tupleWriter.writeTuple(tupleToWrite, byteBuffer, tupleFreeSpace);
- }
- uncompressedTupleCount += tuplesInSegment;
- }
- else {
- // segment has enough tuples, compress segment
- // extract prefix, write prefix tuple to buffer, and set prefix slot
- newPrefixSlots[newPrefixSlots.length - 1 - prefixTupleIndex] = slotManager.encodeSlotFields(numFieldsToCompress, prefixFreeSpace);
- //int tmp = freeSpace;
- //prevRec.reset();
- //System.out.println("SOURCE CONTENTS: " + buf.getInt(prevRec.getFieldOff()) + " " + buf.getInt(prevRec.getFieldOff()+4));
- prefixFreeSpace += tupleWriter.writeTupleFields(prevTuple, 0, numFieldsToCompress, byteBuffer, prefixFreeSpace);
- //System.out.println("WRITING PREFIX RECORD " + prefixSlotNum + " AT " + tmp + " " + freeSpace);
- //System.out.print("CONTENTS: ");
- //for(int x = 0; x < numFieldsToCompress; x++) System.out.print(buf.getInt(tmp + x*4) + " ");
- //System.out.println();
-
- // truncate tuples, write them to buffer, and set tuple slots
- for(int j = 0; j < tuplesInSegment; j++) {
- int currTupleIndex = segmentStart + j;
- tupleToWrite.resetByTupleIndex(frame, currTupleIndex);
- newTupleSlots[tupleCount - 1 - currTupleIndex] = slotManager.encodeSlotFields(prefixTupleIndex, tupleFreeSpace);
- tupleFreeSpace += tupleWriter.writeTupleFields(tupleToWrite, numFieldsToCompress, fieldCount - numFieldsToCompress, byteBuffer, tupleFreeSpace);
+ TypeAwareTupleWriter tupleWriter = new TypeAwareTupleWriter(typeTraits);
+
+ FieldPrefixTupleReference tupleToWrite = new FieldPrefixTupleReference(tupleWriter.createTupleReference());
+ tupleToWrite.setFieldCount(fieldCount);
+
+ while (tupleIndex < tupleCount) {
+ if (kpIndex < keyPartitions.size()) {
+
+ // beginning of keyPartition found, compress entire keyPartition
+ if (tupleIndex == keyPartitions.get(kpIndex).firstTupleIndex) {
+
+ // number of fields we decided to use for compression of
+ // this keyPartition
+ int numFieldsToCompress = keyPartitions.get(kpIndex).maxPmiIndex + 1;
+ int segmentStart = keyPartitions.get(kpIndex).firstTupleIndex;
+ int tuplesInSegment = 1;
+
+ // System.out.println("PROCESSING KEYPARTITION: " + kpIndex
+ // + " RANGE: " + keyPartitions.get(kpIndex).firstRecSlotNum
+ // + " " + keyPartitions.get(kpIndex).lastRecSlotNum +
+ // " FIELDSTOCOMPRESS: " + numFieldsToCompress);
+
+ FieldPrefixTupleReference prevTuple = new FieldPrefixTupleReference(tupleWriter
+ .createTupleReference());
+ prevTuple.setFieldCount(fieldCount);
+
+ FieldPrefixTupleReference tuple = new FieldPrefixTupleReference(tupleWriter.createTupleReference());
+ tuple.setFieldCount(fieldCount);
+
+ for (int i = tupleIndex + 1; i <= keyPartitions.get(kpIndex).lastTupleIndex; i++) {
+ prevTuple.resetByTupleIndex(frame, i - 1);
+ tuple.resetByTupleIndex(frame, i);
+
+ // check if tuples match in numFieldsToCompress of their
+ // first fields
+ int prefixFieldsMatch = 0;
+ for (int j = 0; j < numFieldsToCompress; j++) {
+ if (cmps[j].compare(pageArray, prevTuple.getFieldStart(j), prevTuple.getFieldLength(j),
+ pageArray, tuple.getFieldStart(j), tuple.getFieldLength(j)) == 0)
+ prefixFieldsMatch++;
+ else
+ break;
+ }
+
+ // the two tuples must match in exactly the number of
+ // fields we decided to compress for this keyPartition
+ int processSegments = 0;
+ if (prefixFieldsMatch == numFieldsToCompress)
+ tuplesInSegment++;
+ else
+ processSegments++;
+
+ if (i == keyPartitions.get(kpIndex).lastTupleIndex)
+ processSegments++;
+
+ for (int r = 0; r < processSegments; r++) {
+ // compress current segment and then start new
+ // segment
+ if (tuplesInSegment < occurrenceThreshold || numFieldsToCompress <= 0) {
+ // segment does not have at least
+ // occurrenceThreshold tuples, so write tuples
+ // uncompressed
+ for (int j = 0; j < tuplesInSegment; j++) {
+ int slotNum = segmentStart + j;
+ tupleToWrite.resetByTupleIndex(frame, slotNum);
+ newTupleSlots[tupleCount - 1 - slotNum] = slotManager.encodeSlotFields(
+ FieldPrefixSlotManager.TUPLE_UNCOMPRESSED, tupleFreeSpace);
+ tupleFreeSpace += tupleWriter.writeTuple(tupleToWrite, byteBuffer, tupleFreeSpace);
}
-
- prefixTupleIndex++;
- }
-
- // begin new segment
- segmentStart = i;
- tuplesInSegment = 1;
- }
+ uncompressedTupleCount += tuplesInSegment;
+ } else {
+ // segment has enough tuples, compress segment
+ // extract prefix, write prefix tuple to buffer,
+ // and set prefix slot
+ newPrefixSlots[newPrefixSlots.length - 1 - prefixTupleIndex] = slotManager
+ .encodeSlotFields(numFieldsToCompress, prefixFreeSpace);
+ // int tmp = freeSpace;
+ // prevRec.reset();
+ // System.out.println("SOURCE CONTENTS: " +
+ // buf.getInt(prevRec.getFieldOff()) + " " +
+ // buf.getInt(prevRec.getFieldOff()+4));
+ prefixFreeSpace += tupleWriter.writeTupleFields(prevTuple, 0, numFieldsToCompress,
+ byteBuffer, prefixFreeSpace);
+ // System.out.println("WRITING PREFIX RECORD " +
+ // prefixSlotNum + " AT " + tmp + " " +
+ // freeSpace);
+ // System.out.print("CONTENTS: ");
+ // for(int x = 0; x < numFieldsToCompress; x++)
+ // System.out.print(buf.getInt(tmp + x*4) +
+ // " ");
+ // System.out.println();
+
+ // truncate tuples, write them to buffer, and
+ // set tuple slots
+ for (int j = 0; j < tuplesInSegment; j++) {
+ int currTupleIndex = segmentStart + j;
+ tupleToWrite.resetByTupleIndex(frame, currTupleIndex);
+ newTupleSlots[tupleCount - 1 - currTupleIndex] = slotManager.encodeSlotFields(
+ prefixTupleIndex, tupleFreeSpace);
+ tupleFreeSpace += tupleWriter.writeTupleFields(tupleToWrite, numFieldsToCompress,
+ fieldCount - numFieldsToCompress, byteBuffer, tupleFreeSpace);
+ }
+
+ prefixTupleIndex++;
+ }
+
+ // begin new segment
+ segmentStart = i;
+ tuplesInSegment = 1;
+ }
}
-
+
tupleIndex = keyPartitions.get(kpIndex).lastTupleIndex;
- kpIndex++;
- }
- else {
+ kpIndex++;
+ } else {
// just write the tuple uncompressed
- tupleToWrite.resetByTupleIndex(frame, tupleIndex);
- newTupleSlots[tupleCount - 1 - tupleIndex] = slotManager.encodeSlotFields(FieldPrefixSlotManager.TUPLE_UNCOMPRESSED, tupleFreeSpace);
- tupleFreeSpace += tupleWriter.writeTuple(tupleToWrite, byteBuffer, tupleFreeSpace);
- uncompressedTupleCount++;
- }
- }
- else {
+ tupleToWrite.resetByTupleIndex(frame, tupleIndex);
+ newTupleSlots[tupleCount - 1 - tupleIndex] = slotManager.encodeSlotFields(
+ FieldPrefixSlotManager.TUPLE_UNCOMPRESSED, tupleFreeSpace);
+ tupleFreeSpace += tupleWriter.writeTuple(tupleToWrite, byteBuffer, tupleFreeSpace);
+ uncompressedTupleCount++;
+ }
+ } else {
// just write the tuple uncompressed
- tupleToWrite.resetByTupleIndex(frame, tupleIndex);
- newTupleSlots[tupleCount - 1 - tupleIndex] = slotManager.encodeSlotFields(FieldPrefixSlotManager.TUPLE_UNCOMPRESSED, tupleFreeSpace);
+ tupleToWrite.resetByTupleIndex(frame, tupleIndex);
+ newTupleSlots[tupleCount - 1 - tupleIndex] = slotManager.encodeSlotFields(
+ FieldPrefixSlotManager.TUPLE_UNCOMPRESSED, tupleFreeSpace);
tupleFreeSpace += tupleWriter.writeTuple(tupleToWrite, byteBuffer, tupleFreeSpace);
uncompressedTupleCount++;
- }
+ }
tupleIndex++;
- }
-
- // sanity check to see if we have written exactly as many prefix bytes as computed before
- if(prefixFreeSpace != frame.getOrigFreeSpaceOff() + totalPrefixBytes) {
- throw new Exception("ERROR: Number of prefix bytes written don't match computed number");
}
-
- // in some rare instances our procedure could even increase the space requirement which is very dangerous
- // this can happen to to the greedy solution of the knapsack-like problem
- // therefore, we check if the new space exceeds the page size to avoid the only danger of an increasing space
- int totalSpace = tupleFreeSpace + newTupleSlots.length * slotManager.getSlotSize() + newPrefixSlots.length * slotManager.getSlotSize();
- if(totalSpace > buf.capacity()) return false; // just leave the page as is
-
+
+ // sanity check to see if we have written exactly as many prefix bytes
+ // as computed before
+ if (prefixFreeSpace != frame.getOrigFreeSpaceOff() + totalPrefixBytes) {
+ throw new Exception("ERROR: Number of prefix bytes written don't match computed number");
+ }
+
+ // in some rare instances our procedure could even increase the space
+ // requirement which is very dangerous
+ // this can happen to to the greedy solution of the knapsack-like
+ // problem
+ // therefore, we check if the new space exceeds the page size to avoid
+ // the only danger of an increasing space
+ int totalSpace = tupleFreeSpace + newTupleSlots.length * slotManager.getSlotSize() + newPrefixSlots.length
+ * slotManager.getSlotSize();
+ if (totalSpace > buf.capacity())
+ return false; // just leave the page as is
+
// copy new tuple and new slots into original page
int freeSpaceAfterInit = frame.getOrigFreeSpaceOff();
- System.arraycopy(buffer, freeSpaceAfterInit, pageArray, freeSpaceAfterInit, tupleFreeSpace - freeSpaceAfterInit);
-
+ System
+ .arraycopy(buffer, freeSpaceAfterInit, pageArray, freeSpaceAfterInit, tupleFreeSpace
+ - freeSpaceAfterInit);
+
// copy prefix slots
int slotOffRunner = buf.capacity() - slotManager.getSlotSize();
- for(int i = 0; i < newPrefixSlots.length; i++) {
+ for (int i = 0; i < newPrefixSlots.length; i++) {
buf.putInt(slotOffRunner, newPrefixSlots[newPrefixSlots.length - 1 - i]);
slotOffRunner -= slotManager.getSlotSize();
}
-
+
// copy tuple slots
- for(int i = 0; i < newTupleSlots.length; i++) {
+ for (int i = 0; i < newTupleSlots.length; i++) {
buf.putInt(slotOffRunner, newTupleSlots[newTupleSlots.length - 1 - i]);
slotOffRunner -= slotManager.getSlotSize();
}
-
-// int originalFreeSpaceOff = frame.getOrigFreeSpaceOff();
-// System.out.println("ORIGINALFREESPACE: " + originalFreeSpaceOff);
-// System.out.println("RECSPACE BEF: " + (frame.getFreeSpaceOff() - originalFreeSpaceOff));
-// System.out.println("RECSPACE AFT: " + (recordFreeSpace - originalFreeSpaceOff));
-// System.out.println("PREFIXSLOTS BEF: " + frame.getNumPrefixRecords());
-// System.out.println("PREFIXSLOTS AFT: " + newPrefixSlots.length);
-//
-// System.out.println("FREESPACE BEF: " + frame.getFreeSpaceOff());
-// System.out.println("FREESPACE AFT: " + recordFreeSpace);
-// System.out.println("PREFIXES: " + newPrefixSlots.length + " / " + FieldPrefixSlotManager.MAX_PREFIX_SLOTS);
-// System.out.println("RECORDS: " + newRecordSlots.length);
-
- // update space fields, TODO: we need to update more fields
+
+ // int originalFreeSpaceOff = frame.getOrigFreeSpaceOff();
+ // System.out.println("ORIGINALFREESPACE: " + originalFreeSpaceOff);
+ // System.out.println("RECSPACE BEF: " + (frame.getFreeSpaceOff() -
+ // originalFreeSpaceOff));
+ // System.out.println("RECSPACE AFT: " + (recordFreeSpace -
+ // originalFreeSpaceOff));
+ // System.out.println("PREFIXSLOTS BEF: " +
+ // frame.getNumPrefixRecords());
+ // System.out.println("PREFIXSLOTS AFT: " + newPrefixSlots.length);
+ //
+ // System.out.println("FREESPACE BEF: " + frame.getFreeSpaceOff());
+ // System.out.println("FREESPACE AFT: " + recordFreeSpace);
+ // System.out.println("PREFIXES: " + newPrefixSlots.length + " / " +
+ // FieldPrefixSlotManager.MAX_PREFIX_SLOTS);
+ // System.out.println("RECORDS: " + newRecordSlots.length);
+
+ // update space fields, TODO: we need to update more fields
frame.setFreeSpaceOff(tupleFreeSpace);
frame.setPrefixTupleCount(newPrefixSlots.length);
frame.setUncompressedTupleCount(uncompressedTupleCount);
- int totalFreeSpace = buf.capacity() - tupleFreeSpace - ((newTupleSlots.length + newPrefixSlots.length) * slotManager.getSlotSize());
+ int totalFreeSpace = buf.capacity() - tupleFreeSpace
+ - ((newTupleSlots.length + newPrefixSlots.length) * slotManager.getSlotSize());
frame.setTotalFreeSpace(totalFreeSpace);
-
+
return true;
}
-
- // we perform an analysis pass over the tuples to determine the costs and benefits of different compression options
+
+ // we perform an analysis pass over the tuples to determine the costs and
+ // benefits of different compression options
// a "keypartition" is a range of tuples that has an identical first field
// for each keypartition we chose a prefix length to use for compression
- // i.e., all tuples in a keypartition will be compressed based on the same prefix length (number of fields)
+ // i.e., all tuples in a keypartition will be compressed based on the same
+ // prefix length (number of fields)
// the prefix length may be different for different keypartitions
- // the occurrenceThreshold determines the minimum number of tuples that must share a common prefix in order for us to consider compressing them
- private ArrayList<KeyPartition> getKeyPartitions(FieldPrefixNSMLeafFrame frame, MultiComparator cmp, int occurrenceThreshold) {
- IBinaryComparator[] cmps = cmp.getComparators();
+ // the occurrenceThreshold determines the minimum number of tuples that must
+ // share a common prefix in order for us to consider compressing them
+ private ArrayList<KeyPartition> getKeyPartitions(FieldPrefixNSMLeafFrame frame, MultiComparator cmp,
+ int occurrenceThreshold) {
+ IBinaryComparator[] cmps = cmp.getComparators();
int fieldCount = cmp.getKeyFieldCount();
-
+
int maxCmps = cmps.length - 1;
ByteBuffer buf = frame.getBuffer();
byte[] pageArray = buf.array();
IPrefixSlotManager slotManager = frame.slotManager;
-
- ArrayList<KeyPartition> keyPartitions = new ArrayList<KeyPartition>();
- KeyPartition kp = new KeyPartition(maxCmps);
+
+ ArrayList<KeyPartition> keyPartitions = new ArrayList<KeyPartition>();
+ KeyPartition kp = new KeyPartition(maxCmps);
keyPartitions.add(kp);
-
+
TypeAwareTupleWriter tupleWriter = new TypeAwareTupleWriter(typeTraits);
-
+
FieldPrefixTupleReference prevTuple = new FieldPrefixTupleReference(tupleWriter.createTupleReference());
- prevTuple.setFieldCount(fieldCount);
-
- FieldPrefixTupleReference tuple = new FieldPrefixTupleReference(tupleWriter.createTupleReference());
- tuple.setFieldCount(fieldCount);
-
- kp.firstTupleIndex = 0;
+ prevTuple.setFieldCount(fieldCount);
+
+ FieldPrefixTupleReference tuple = new FieldPrefixTupleReference(tupleWriter.createTupleReference());
+ tuple.setFieldCount(fieldCount);
+
+ kp.firstTupleIndex = 0;
int tupleCount = frame.getTupleCount();
- for(int i = 1; i < tupleCount; i++) {
- prevTuple.resetByTupleIndex(frame, i - 1);
- tuple.resetByTupleIndex(frame, i);
-
- //System.out.println("BEFORE RECORD: " + i + " " + rec.recSlotOff + " " + rec.recOff);
- //kp.print();
-
- int prefixFieldsMatch = 0;
- for(int j = 0; j < maxCmps; j++) {
-
- if(cmps[j].compare(pageArray, prevTuple.getFieldStart(j), prevTuple.getFieldLength(j), pageArray, tuple.getFieldStart(j), prevTuple.getFieldLength(j)) == 0) {
+ for (int i = 1; i < tupleCount; i++) {
+ prevTuple.resetByTupleIndex(frame, i - 1);
+ tuple.resetByTupleIndex(frame, i);
+
+ // System.out.println("BEFORE RECORD: " + i + " " + rec.recSlotOff +
+ // " " + rec.recOff);
+ // kp.print();
+
+ int prefixFieldsMatch = 0;
+ for (int j = 0; j < maxCmps; j++) {
+
+ if (cmps[j].compare(pageArray, prevTuple.getFieldStart(j), prevTuple.getFieldLength(j), pageArray,
+ tuple.getFieldStart(j), prevTuple.getFieldLength(j)) == 0) {
prefixFieldsMatch++;
- kp.pmi[j].matches++;
-
+ kp.pmi[j].matches++;
+
int prefixBytes = tupleWriter.bytesRequired(tuple, 0, prefixFieldsMatch);
- int spaceBenefit = tupleWriter.bytesRequired(tuple) - tupleWriter.bytesRequired(tuple, prefixFieldsMatch, tuple.getFieldCount() - prefixFieldsMatch);
-
- if(kp.pmi[j].matches == occurrenceThreshold) {
- // if we compress this prefix, we pay the cost of storing it once, plus the size for one prefix slot
+ int spaceBenefit = tupleWriter.bytesRequired(tuple)
+ - tupleWriter.bytesRequired(tuple, prefixFieldsMatch, tuple.getFieldCount()
+ - prefixFieldsMatch);
+
+ if (kp.pmi[j].matches == occurrenceThreshold) {
+ // if we compress this prefix, we pay the cost of
+ // storing it once, plus the size for one prefix slot
kp.pmi[j].prefixBytes += prefixBytes;
kp.pmi[j].spaceCost += prefixBytes + slotManager.getSlotSize();
kp.pmi[j].prefixSlotsNeeded++;
- kp.pmi[j].spaceBenefit += occurrenceThreshold * spaceBenefit;
- }
- else if(kp.pmi[j].matches > occurrenceThreshold) {
- // we are beyond the occurrence threshold, every additional tuple with a matching prefix increases the benefit
+ kp.pmi[j].spaceBenefit += occurrenceThreshold * spaceBenefit;
+ } else if (kp.pmi[j].matches > occurrenceThreshold) {
+ // we are beyond the occurrence threshold, every
+ // additional tuple with a matching prefix increases the
+ // benefit
kp.pmi[j].spaceBenefit += spaceBenefit;
}
- }
- else {
+ } else {
kp.pmi[j].matches = 1;
break;
- }
+ }
}
-
- //System.out.println();
- //System.out.println("AFTER RECORD: " + i);
- //kp.print();
- //System.out.println("-----------------");
-
- // this means not even the first field matched, so we start to consider a new "key partition"
- if(maxCmps > 0 && prefixFieldsMatch == 0) {
- //System.out.println("NEW KEY PARTITION");
- kp.lastTupleIndex = i-1;
-
+
+ // System.out.println();
+ // System.out.println("AFTER RECORD: " + i);
+ // kp.print();
+ // System.out.println("-----------------");
+
+ // this means not even the first field matched, so we start to
+ // consider a new "key partition"
+ if (maxCmps > 0 && prefixFieldsMatch == 0) {
+ // System.out.println("NEW KEY PARTITION");
+ kp.lastTupleIndex = i - 1;
+
// remove keyPartitions that don't have enough tuples
- if((kp.lastTupleIndex - kp.firstTupleIndex) + 1 < occurrenceThreshold) keyPartitions.remove(keyPartitions.size() - 1);
-
- kp = new KeyPartition(maxCmps);
+ if ((kp.lastTupleIndex - kp.firstTupleIndex) + 1 < occurrenceThreshold)
+ keyPartitions.remove(keyPartitions.size() - 1);
+
+ kp = new KeyPartition(maxCmps);
keyPartitions.add(kp);
kp.firstTupleIndex = i;
- }
- }
+ }
+ }
kp.lastTupleIndex = tupleCount - 1;
// remove keyPartitions that don't have enough tuples
- if((kp.lastTupleIndex - kp.firstTupleIndex) + 1 < occurrenceThreshold) keyPartitions.remove(keyPartitions.size() - 1);
-
+ if ((kp.lastTupleIndex - kp.firstTupleIndex) + 1 < occurrenceThreshold)
+ keyPartitions.remove(keyPartitions.size() - 1);
+
return keyPartitions;
}
-
-
+
private class PrefixMatchInfo {
public int matches = 1;
- public int spaceCost = 0;
+ public int spaceCost = 0;
public int spaceBenefit = 0;
- public int prefixSlotsNeeded = 0;
+ public int prefixSlotsNeeded = 0;
public int prefixBytes = 0;
}
-
- private class KeyPartition {
+
+ private class KeyPartition {
public int firstTupleIndex;
public int lastTupleIndex;
public PrefixMatchInfo[] pmi;
-
+
public int maxBenefitMinusCost = 0;
- public int maxPmiIndex = -1;
+ public int maxPmiIndex = -1;
+
// number of fields used for compression for this kp of current page
-
+
public KeyPartition(int numKeyFields) {
pmi = new PrefixMatchInfo[numKeyFields];
- for(int i = 0; i < numKeyFields; i++) {
+ for (int i = 0; i < numKeyFields; i++) {
pmi[i] = new PrefixMatchInfo();
}
- }
- }
-
- private class SortByHeuristic implements Comparator<KeyPartition>{
- @Override
- public int compare(KeyPartition a, KeyPartition b) {
- if(a.maxPmiIndex < 0) {
- if(b.maxPmiIndex < 0) return 0;
- return 1;
- } else if(b.maxPmiIndex < 0) return -1;
-
- // non-negative maxPmiIndex, meaning a non-zero benefit exists
- float thisHeuristicVal = (float)a.maxBenefitMinusCost / (float)a.pmi[a.maxPmiIndex].prefixSlotsNeeded;
- float otherHeuristicVal = (float)b.maxBenefitMinusCost / (float)b.pmi[b.maxPmiIndex].prefixSlotsNeeded;
- if(thisHeuristicVal < otherHeuristicVal) return 1;
- else if(thisHeuristicVal > otherHeuristicVal) return -1;
- else return 0;
}
}
-
- private class SortByOriginalRank implements Comparator<KeyPartition>{
+
+ private class SortByHeuristic implements Comparator<KeyPartition> {
+ @Override
+ public int compare(KeyPartition a, KeyPartition b) {
+ if (a.maxPmiIndex < 0) {
+ if (b.maxPmiIndex < 0)
+ return 0;
+ return 1;
+ } else if (b.maxPmiIndex < 0)
+ return -1;
+
+ // non-negative maxPmiIndex, meaning a non-zero benefit exists
+ float thisHeuristicVal = (float) a.maxBenefitMinusCost / (float) a.pmi[a.maxPmiIndex].prefixSlotsNeeded;
+ float otherHeuristicVal = (float) b.maxBenefitMinusCost / (float) b.pmi[b.maxPmiIndex].prefixSlotsNeeded;
+ if (thisHeuristicVal < otherHeuristicVal)
+ return 1;
+ else if (thisHeuristicVal > otherHeuristicVal)
+ return -1;
+ else
+ return 0;
+ }
+ }
+
+ private class SortByOriginalRank implements Comparator<KeyPartition> {
@Override
public int compare(KeyPartition a, KeyPartition b) {
return a.firstTupleIndex - b.firstTupleIndex;
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/AbstractBTreeOperatorDescriptor.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/AbstractBTreeOperatorDescriptor.java
index 2ceb58a..35e6951 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/AbstractBTreeOperatorDescriptor.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/AbstractBTreeOperatorDescriptor.java
@@ -23,71 +23,70 @@
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
public abstract class AbstractBTreeOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
-
- private static final long serialVersionUID = 1L;
-
- protected final IFileMappingProviderProvider fileMappingProviderProvider;
-
- protected final IFileSplitProvider fileSplitProvider;
-
- protected final IBinaryComparatorFactory[] comparatorFactories;
-
- protected final IBTreeInteriorFrameFactory interiorFrameFactory;
- protected final IBTreeLeafFrameFactory leafFrameFactory;
-
- protected final IBufferCacheProvider bufferCacheProvider;
- protected final IBTreeRegistryProvider btreeRegistryProvider;
-
- protected final ITypeTrait[] typeTraits;
-
- public AbstractBTreeOperatorDescriptor(JobSpecification spec, int inputArity, int outputArity, RecordDescriptor recDesc, IBufferCacheProvider bufferCacheProvider, IBTreeRegistryProvider btreeRegistryProvider, IFileSplitProvider fileSplitProvider, IFileMappingProviderProvider fileMappingProviderProvider, IBTreeInteriorFrameFactory interiorFactory, IBTreeLeafFrameFactory leafFactory, ITypeTrait[] typeTraits, IBinaryComparatorFactory[] comparatorFactories) {
+
+ private static final long serialVersionUID = 1L;
+
+ protected final IFileSplitProvider fileSplitProvider;
+
+ protected final IBinaryComparatorFactory[] comparatorFactories;
+
+ protected final IBTreeInteriorFrameFactory interiorFrameFactory;
+ protected final IBTreeLeafFrameFactory leafFrameFactory;
+
+ protected final IStorageManagerInterface storageManager;
+ protected final IBTreeRegistryProvider btreeRegistryProvider;
+
+ protected final ITypeTrait[] typeTraits;
+
+ public AbstractBTreeOperatorDescriptor(JobSpecification spec, int inputArity, int outputArity,
+ RecordDescriptor recDesc, IStorageManagerInterface storageManager,
+ IBTreeRegistryProvider btreeRegistryProvider, IFileSplitProvider fileSplitProvider,
+ IBTreeInteriorFrameFactory interiorFactory, IBTreeLeafFrameFactory leafFactory, ITypeTrait[] typeTraits,
+ IBinaryComparatorFactory[] comparatorFactories) {
super(spec, inputArity, outputArity);
this.fileSplitProvider = fileSplitProvider;
- this.fileMappingProviderProvider = fileMappingProviderProvider;
- this.bufferCacheProvider = bufferCacheProvider;
- this.btreeRegistryProvider = btreeRegistryProvider;
+ this.storageManager = storageManager;
+ this.btreeRegistryProvider = btreeRegistryProvider;
this.interiorFrameFactory = interiorFactory;
this.leafFrameFactory = leafFactory;
this.typeTraits = typeTraits;
this.comparatorFactories = comparatorFactories;
- if(outputArity > 0) recordDescriptors[0] = recDesc;
+ if (outputArity > 0)
+ recordDescriptors[0] = recDesc;
}
- public IFileSplitProvider getFileSplitProvider() {
- return fileSplitProvider;
- }
+ public IFileSplitProvider getFileSplitProvider() {
+ return fileSplitProvider;
+ }
- public IFileMappingProviderProvider getFileMappingProviderProvider() {
- return fileMappingProviderProvider;
- }
+ public IBinaryComparatorFactory[] getComparatorFactories() {
+ return comparatorFactories;
+ }
- public IBinaryComparatorFactory[] getComparatorFactories() {
- return comparatorFactories;
- }
-
- public ITypeTrait[] getTypeTraits() {
- return typeTraits;
- }
-
- public IBTreeInteriorFrameFactory getInteriorFactory() {
- return interiorFrameFactory;
- }
+ public ITypeTrait[] getTypeTraits() {
+ return typeTraits;
+ }
- public IBTreeLeafFrameFactory getLeafFactory() {
- return leafFrameFactory;
- }
+ public IBTreeInteriorFrameFactory getInteriorFactory() {
+ return interiorFrameFactory;
+ }
- public IBufferCacheProvider getBufferCacheProvider() {
- return bufferCacheProvider;
- }
+ public IBTreeLeafFrameFactory getLeafFactory() {
+ return leafFrameFactory;
+ }
- public IBTreeRegistryProvider getBtreeRegistryProvider() {
- return btreeRegistryProvider;
- }
-
- public RecordDescriptor getRecordDescriptor() {
- return recordDescriptors[0];
- }
+ public IStorageManagerInterface getStorageManager() {
+ return storageManager;
+ }
+
+ public IBTreeRegistryProvider getBtreeRegistryProvider() {
+ return btreeRegistryProvider;
+ }
+
+ public RecordDescriptor getRecordDescriptor() {
+ return recordDescriptors[0];
+ }
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeBulkLoadOperatorDescriptor.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeBulkLoadOperatorDescriptor.java
index 29a35d6..6b64b03 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeBulkLoadOperatorDescriptor.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeBulkLoadOperatorDescriptor.java
@@ -25,33 +25,29 @@
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
public class BTreeBulkLoadOperatorDescriptor extends AbstractBTreeOperatorDescriptor {
-
- private static final long serialVersionUID = 1L;
-
- private final int[] fieldPermutation;
+
+ private static final long serialVersionUID = 1L;
+
+ private final int[] fieldPermutation;
private final float fillFactor;
-
- public BTreeBulkLoadOperatorDescriptor(JobSpecification spec,
- IBufferCacheProvider bufferCacheProvider,
- IBTreeRegistryProvider btreeRegistryProvider,
- IFileSplitProvider fileSplitProvider, IFileMappingProviderProvider fileMappingProviderProvider, IBTreeInteriorFrameFactory interiorFactory,
- IBTreeLeafFrameFactory leafFactory, ITypeTrait[] typeTraits,
- IBinaryComparatorFactory[] comparatorFactories,
- int[] fieldPermutation, float fillFactor) {
- super(spec, 1, 0, null, bufferCacheProvider,
- btreeRegistryProvider, fileSplitProvider, fileMappingProviderProvider, interiorFactory,
- leafFactory, typeTraits, comparatorFactories);
- this.fieldPermutation = fieldPermutation;
- this.fillFactor = fillFactor;
- }
-
- @Override
- public IOperatorNodePushable createPushRuntime(IHyracksContext ctx,
- IOperatorEnvironment env,
- IRecordDescriptorProvider recordDescProvider, int partition,
- int nPartitions) {
- return new BTreeBulkLoadOperatorNodePushable(this, ctx, partition, fieldPermutation, fillFactor, recordDescProvider);
- }
+
+ public BTreeBulkLoadOperatorDescriptor(JobSpecification spec, IStorageManagerInterface storageManager,
+ IBTreeRegistryProvider btreeRegistryProvider, IFileSplitProvider fileSplitProvider,
+ IBTreeInteriorFrameFactory interiorFactory, IBTreeLeafFrameFactory leafFactory, ITypeTrait[] typeTraits,
+ IBinaryComparatorFactory[] comparatorFactories, int[] fieldPermutation, float fillFactor) {
+ super(spec, 1, 0, null, storageManager, btreeRegistryProvider, fileSplitProvider, interiorFactory, leafFactory,
+ typeTraits, comparatorFactories);
+ this.fieldPermutation = fieldPermutation;
+ this.fillFactor = fillFactor;
+ }
+
+ @Override
+ public IOperatorNodePushable createPushRuntime(IHyracksContext ctx, IOperatorEnvironment env,
+ IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
+ return new BTreeBulkLoadOperatorNodePushable(this, ctx, partition, fieldPermutation, fillFactor,
+ recordDescProvider);
+ }
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeBulkLoadOperatorNodePushable.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeBulkLoadOperatorNodePushable.java
index 92bbce1..1a868e7 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeBulkLoadOperatorNodePushable.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeBulkLoadOperatorNodePushable.java
@@ -31,11 +31,11 @@
private final BTreeOpHelper btreeOpHelper;
private FrameTupleAccessor accessor;
private BTree.BulkLoadContext bulkLoadCtx;
-
+
private IRecordDescriptorProvider recordDescProvider;
private PermutingFrameTupleReference tuple = new PermutingFrameTupleReference();
-
+
public BTreeBulkLoadOperatorNodePushable(AbstractBTreeOperatorDescriptor opDesc, IHyracksContext ctx,
int partition, int[] fieldPermutation, float fillFactor, IRecordDescriptorProvider recordDescProvider) {
btreeOpHelper = new BTreeOpHelper(opDesc, ctx, partition, BTreeOpHelper.BTreeMode.CREATE_BTREE);
@@ -43,46 +43,38 @@
this.recordDescProvider = recordDescProvider;
tuple.setFieldPermutation(fieldPermutation);
}
-
+
@Override
public void open() throws HyracksDataException {
AbstractBTreeOperatorDescriptor opDesc = btreeOpHelper.getOperatorDescriptor();
- RecordDescriptor recDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getOperatorId(), 0);
+ RecordDescriptor recDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getOperatorId(), 0);
accessor = new FrameTupleAccessor(btreeOpHelper.getHyracksContext(), recDesc);
- IBTreeMetaDataFrame metaFrame = new MetaDataFrame();
+ IBTreeMetaDataFrame metaFrame = new MetaDataFrame();
btreeOpHelper.init();
- btreeOpHelper.getBTree().open(btreeOpHelper.getBTreeFileId());
- try {
- bulkLoadCtx = btreeOpHelper.getBTree().beginBulkLoad(fillFactor, btreeOpHelper.getLeafFrame(), btreeOpHelper.getInteriorFrame(), metaFrame);
- } catch (Exception e) {
- throw new HyracksDataException(e);
- }
+ btreeOpHelper.getBTree().open(btreeOpHelper.getBTreeFileId());
+ bulkLoadCtx = btreeOpHelper.getBTree().beginBulkLoad(fillFactor, btreeOpHelper.getLeafFrame(),
+ btreeOpHelper.getInteriorFrame(), metaFrame);
}
-
+
@Override
public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
accessor.reset(buffer);
-
int tupleCount = accessor.getTupleCount();
for (int i = 0; i < tupleCount; i++) {
tuple.reset(accessor, i);
- try {
- btreeOpHelper.getBTree().bulkLoadAddTuple(bulkLoadCtx, tuple);
- } catch (Exception e) {
- e.printStackTrace();
- }
+ btreeOpHelper.getBTree().bulkLoadAddTuple(bulkLoadCtx, tuple);
}
}
-
+
@Override
public void close() throws HyracksDataException {
try {
btreeOpHelper.getBTree().endBulkLoad(bulkLoadCtx);
- } catch (Exception e) {
- e.printStackTrace();
+ } finally {
+ btreeOpHelper.deinit();
}
}
-
+
@Override
public void flush() throws HyracksDataException {
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDiskOrderScanOperatorDescriptor.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDiskOrderScanOperatorDescriptor.java
index 730b5f3..ddc7c39 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDiskOrderScanOperatorDescriptor.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDiskOrderScanOperatorDescriptor.java
@@ -25,28 +25,23 @@
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
public class BTreeDiskOrderScanOperatorDescriptor extends AbstractBTreeOperatorDescriptor {
-
- private static final long serialVersionUID = 1L;
-
- public BTreeDiskOrderScanOperatorDescriptor(JobSpecification spec,
- RecordDescriptor recDesc,
- IBufferCacheProvider bufferCacheProvider,
- IBTreeRegistryProvider btreeRegistryProvider,
- IFileSplitProvider fileSplitProvider, IFileMappingProviderProvider fileMappingProviderProvider, IBTreeInteriorFrameFactory interiorFactory,
- IBTreeLeafFrameFactory leafFactory,
- ITypeTrait[] typeTraits) {
- super(spec, 0, 1, recDesc, bufferCacheProvider,
- btreeRegistryProvider, fileSplitProvider, fileMappingProviderProvider, interiorFactory,
- leafFactory, typeTraits, null);
- }
-
- @Override
- public IOperatorNodePushable createPushRuntime(IHyracksContext ctx,
- IOperatorEnvironment env,
- IRecordDescriptorProvider recordDescProvider, int partition,
- int nPartitions) {
- return new BTreeDiskOrderScanOperatorNodePushable(this, ctx, partition);
- }
+
+ private static final long serialVersionUID = 1L;
+
+ public BTreeDiskOrderScanOperatorDescriptor(JobSpecification spec, RecordDescriptor recDesc,
+ IStorageManagerInterface storageManager, IBTreeRegistryProvider btreeRegistryProvider,
+ IFileSplitProvider fileSplitProvider, IBTreeInteriorFrameFactory interiorFactory,
+ IBTreeLeafFrameFactory leafFactory, ITypeTrait[] typeTraits) {
+ super(spec, 0, 1, recDesc, storageManager, btreeRegistryProvider, fileSplitProvider, interiorFactory,
+ leafFactory, typeTraits, null);
+ }
+
+ @Override
+ public IOperatorNodePushable createPushRuntime(IHyracksContext ctx, IOperatorEnvironment env,
+ IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
+ return new BTreeDiskOrderScanOperatorNodePushable(this, ctx, partition);
+ }
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDiskOrderScanOperatorNodePushable.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDiskOrderScanOperatorNodePushable.java
index ebe1a9b..c3a0df2 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDiskOrderScanOperatorNodePushable.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDiskOrderScanOperatorNodePushable.java
@@ -15,7 +15,6 @@
package edu.uci.ics.hyracks.storage.am.btree.dataflow;
import java.io.DataOutput;
-import java.io.FileNotFoundException;
import java.nio.ByteBuffer;
import edu.uci.ics.hyracks.api.context.IHyracksContext;
@@ -34,26 +33,20 @@
public class BTreeDiskOrderScanOperatorNodePushable extends AbstractUnaryOutputSourceOperatorNodePushable {
private final BTreeOpHelper btreeOpHelper;
- public BTreeDiskOrderScanOperatorNodePushable(AbstractBTreeOperatorDescriptor opDesc, IHyracksContext ctx, int partition) {
+ public BTreeDiskOrderScanOperatorNodePushable(AbstractBTreeOperatorDescriptor opDesc, IHyracksContext ctx,
+ int partition) {
btreeOpHelper = new BTreeOpHelper(opDesc, ctx, partition, BTreeOpHelper.BTreeMode.OPEN_BTREE);
}
@Override
public void initialize() throws HyracksDataException {
-
+
IBTreeLeafFrame cursorFrame = btreeOpHelper.getOperatorDescriptor().getLeafFactory().getFrame();
DiskOrderScanCursor cursor = new DiskOrderScanCursor(cursorFrame);
IBTreeMetaDataFrame metaFrame = new MetaDataFrame();
-
- try {
- btreeOpHelper.init();
- //btreeOpHelper.fill();
- btreeOpHelper.getBTree().diskOrderScan(cursor, cursorFrame, metaFrame);
- } catch (FileNotFoundException e1) {
- e1.printStackTrace();
- } catch (Exception e) {
- e.printStackTrace();
- }
+
+ btreeOpHelper.init();
+ btreeOpHelper.getBTree().diskOrderScan(cursor, cursorFrame, metaFrame);
MultiComparator cmp = btreeOpHelper.getBTree().getMultiComparator();
ByteBuffer frame = btreeOpHelper.getHyracksContext().getResourceManager().allocateFrame();
@@ -61,13 +54,13 @@
appender.reset(frame, true);
ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
DataOutput dos = tb.getDataOutput();
-
+
try {
while (cursor.hasNext()) {
tb.reset();
cursor.next();
- ITupleReference frameTuple = cursor.getTuple();
+ ITupleReference frameTuple = cursor.getTuple();
for (int i = 0; i < frameTuple.getFieldCount(); i++) {
dos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
tb.addFieldEndOffset();
@@ -85,7 +78,7 @@
if (appender.getTupleCount() > 0) {
FrameUtils.flushFrame(frame, writer);
}
-
+
cursor.close();
writer.close();
@@ -93,4 +86,9 @@
e.printStackTrace();
}
}
+
+ @Override
+ public void deinitialize() throws HyracksDataException {
+ btreeOpHelper.deinit();
+ }
}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDropOperatorDescriptor.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDropOperatorDescriptor.java
index 295fd50..5a04d79 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDropOperatorDescriptor.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDropOperatorDescriptor.java
@@ -22,32 +22,27 @@
import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
public class BTreeDropOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
-
- private static final long serialVersionUID = 1L;
-
- private IBufferCacheProvider bufferCacheProvider;
- private IBTreeRegistryProvider btreeRegistryProvider;
- private IFileMappingProviderProvider fileMappingProviderProvider;
- private IFileSplitProvider fileSplitProvider;
-
- public BTreeDropOperatorDescriptor(JobSpecification spec,
- IBufferCacheProvider bufferCacheProvider,
- IBTreeRegistryProvider btreeRegistryProvider,
- IFileSplitProvider fileSplitProvider, IFileMappingProviderProvider fileMappingProviderProvider) {
- super(spec, 0, 0);
- this.fileMappingProviderProvider = fileMappingProviderProvider;
- this.bufferCacheProvider = bufferCacheProvider;
- this.btreeRegistryProvider = btreeRegistryProvider;
- this.fileSplitProvider = fileSplitProvider;
- }
-
- @Override
- public IOperatorNodePushable createPushRuntime(IHyracksContext ctx,
- IOperatorEnvironment env,
- IRecordDescriptorProvider recordDescProvider, int partition,
- int nPartitions) {
- return new BTreeDropOperatorNodePushable(bufferCacheProvider, btreeRegistryProvider, fileSplitProvider, partition, fileMappingProviderProvider);
- }
+
+ private static final long serialVersionUID = 1L;
+
+ private IStorageManagerInterface storageManager;
+ private IBTreeRegistryProvider btreeRegistryProvider;
+ private IFileSplitProvider fileSplitProvider;
+
+ public BTreeDropOperatorDescriptor(JobSpecification spec, IStorageManagerInterface storageManager,
+ IBTreeRegistryProvider btreeRegistryProvider, IFileSplitProvider fileSplitProvider) {
+ super(spec, 0, 0);
+ this.storageManager = storageManager;
+ this.btreeRegistryProvider = btreeRegistryProvider;
+ this.fileSplitProvider = fileSplitProvider;
+ }
+
+ @Override
+ public IOperatorNodePushable createPushRuntime(IHyracksContext ctx, IOperatorEnvironment env,
+ IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
+ return new BTreeDropOperatorNodePushable(storageManager, btreeRegistryProvider, fileSplitProvider, partition);
+ }
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDropOperatorNodePushable.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDropOperatorNodePushable.java
index 3bdf4a2..3745fad 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDropOperatorNodePushable.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDropOperatorNodePushable.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.btree.dataflow;
import java.io.File;
@@ -7,21 +22,20 @@
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractOperatorNodePushable;
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.common.file.FileManager;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
public class BTreeDropOperatorNodePushable extends AbstractOperatorNodePushable {
-
+
private IBTreeRegistryProvider btreeRegistryProvider;
- private IBufferCacheProvider bufferCacheProvider;
- private IFileMappingProviderProvider fileMappingProviderProvider;
+ private IStorageManagerInterface storageManager;
private IFileSplitProvider fileSplitProvider;
private int partition;
-
- public BTreeDropOperatorNodePushable(IBufferCacheProvider bufferCacheProvider,
- IBTreeRegistryProvider btreeRegistryProvider, IFileSplitProvider fileSplitProvider, int partition,
- IFileMappingProviderProvider fileMappingProviderProvider) {
- this.fileMappingProviderProvider = fileMappingProviderProvider;
- this.bufferCacheProvider = bufferCacheProvider;
+
+ public BTreeDropOperatorNodePushable(IStorageManagerInterface storageManager,
+ IBTreeRegistryProvider btreeRegistryProvider, IFileSplitProvider fileSplitProvider, int partition) {
+ this.storageManager = storageManager;
this.btreeRegistryProvider = btreeRegistryProvider;
this.fileSplitProvider = fileSplitProvider;
this.partition = partition;
@@ -45,31 +59,32 @@
public void initialize() throws HyracksDataException {
BTreeRegistry btreeRegistry = btreeRegistryProvider.getBTreeRegistry();
- FileManager fileManager = bufferCacheProvider.getFileManager();
-
- File f = fileSplitProvider.getFileSplits()[partition].getLocalFile();
- String fileName = f.getAbsolutePath();
-
- Integer fileId = fileMappingProviderProvider.getFileMappingProvider().getFileId(fileName);
- if(fileId == null) {
- throw new HyracksDataException("Cannot drop B-Tree with name " + fileName + ". No file mapping exists.");
+ IBufferCache bufferCache = storageManager.getBufferCache();
+ IFileMapProvider fileMapProvider = storageManager.getFileMapProvider();
+
+ File f = fileSplitProvider.getFileSplits()[partition].getLocalFile();
+ String fileName = f.getAbsolutePath();
+
+ boolean fileIsMapped = fileMapProvider.isMapped(fileName);
+ if (!fileIsMapped) {
+ throw new HyracksDataException("Cannot drop B-Tree with name " + fileName + ". No file mapping exists.");
}
- int btreeFileId = fileId;
-
- // unregister btree instance
+
+ int btreeFileId = fileMapProvider.lookupFileId(fileName);
+
+ // unregister btree instance
btreeRegistry.lock();
try {
btreeRegistry.unregister(btreeFileId);
} finally {
btreeRegistry.unlock();
}
-
+
// remove name to id mapping
- fileMappingProviderProvider.getFileMappingProvider().unmapName(fileName);
-
- // unregister file
- fileManager.unregisterFile(btreeFileId);
-
+ bufferCache.deleteFile(btreeFileId);
+
+ // TODO: should this be handled through the BufferCache or
+ // FileMapProvider?
if (f.exists()) {
f.delete();
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeFileEnlistmentOperatorDescriptor.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeFileEnlistmentOperatorDescriptor.java
index fd7c5a6..d66e7c0 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeFileEnlistmentOperatorDescriptor.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeFileEnlistmentOperatorDescriptor.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.btree.dataflow;
import edu.uci.ics.hyracks.api.context.IHyracksContext;
@@ -12,6 +27,7 @@
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
// re-create in-memory state for a btree that has already been built (i.e., the file exists):
// 1. register files in file manager (FileManager)
@@ -20,28 +36,20 @@
public class BTreeFileEnlistmentOperatorDescriptor extends AbstractBTreeOperatorDescriptor {
- private static final long serialVersionUID = 1L;
-
- public BTreeFileEnlistmentOperatorDescriptor(JobSpecification spec,
- RecordDescriptor recDesc,
- IBufferCacheProvider bufferCacheProvider,
- IBTreeRegistryProvider btreeRegistryProvider,
- IFileSplitProvider fileSplitProvider,
- IFileMappingProviderProvider fileMappingProviderProvider,
- IBTreeInteriorFrameFactory interiorFactory,
- IBTreeLeafFrameFactory leafFactory, ITypeTrait[] typeTraits,
- IBinaryComparatorFactory[] comparatorFactories) {
- super(spec, 0, 0, recDesc, bufferCacheProvider,
- btreeRegistryProvider, fileSplitProvider, fileMappingProviderProvider,
- interiorFactory, leafFactory, typeTraits, comparatorFactories);
- }
-
- @Override
- public IOperatorNodePushable createPushRuntime(IHyracksContext ctx,
- IOperatorEnvironment env,
- IRecordDescriptorProvider recordDescProvider, int partition,
- int partitions) throws HyracksDataException {
- return new BTreeFileEnlistmentOperatorNodePushable(this, ctx, partition);
- }
-
+ private static final long serialVersionUID = 1L;
+
+ public BTreeFileEnlistmentOperatorDescriptor(JobSpecification spec, RecordDescriptor recDesc,
+ IStorageManagerInterface storageManager, IBTreeRegistryProvider btreeRegistryProvider,
+ IFileSplitProvider fileSplitProvider, IBTreeInteriorFrameFactory interiorFactory,
+ IBTreeLeafFrameFactory leafFactory, ITypeTrait[] typeTraits, IBinaryComparatorFactory[] comparatorFactories) {
+ super(spec, 0, 0, recDesc, storageManager, btreeRegistryProvider, fileSplitProvider, interiorFactory,
+ leafFactory, typeTraits, comparatorFactories);
+ }
+
+ @Override
+ public IOperatorNodePushable createPushRuntime(IHyracksContext ctx, IOperatorEnvironment env,
+ IRecordDescriptorProvider recordDescProvider, int partition, int partitions) throws HyracksDataException {
+ return new BTreeFileEnlistmentOperatorNodePushable(this, ctx, partition);
+ }
+
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeFileEnlistmentOperatorNodePushable.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeFileEnlistmentOperatorNodePushable.java
index cd0b4b3..3e48a01 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeFileEnlistmentOperatorNodePushable.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeFileEnlistmentOperatorNodePushable.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.btree.dataflow;
import edu.uci.ics.hyracks.api.comm.IFrameWriter;
@@ -7,34 +22,35 @@
import edu.uci.ics.hyracks.dataflow.std.base.AbstractOperatorNodePushable;
public class BTreeFileEnlistmentOperatorNodePushable extends AbstractOperatorNodePushable {
-
- private final BTreeOpHelper btreeOpHelper;
-
- public BTreeFileEnlistmentOperatorNodePushable(AbstractBTreeOperatorDescriptor opDesc, IHyracksContext ctx, int partition) {
- btreeOpHelper = new BTreeOpHelper(opDesc, ctx, partition, BTreeOpHelper.BTreeMode.ENLIST_BTREE);
- }
- @Override
- public void deinitialize() throws HyracksDataException {
- }
+ private final BTreeOpHelper btreeOpHelper;
- @Override
- public int getInputArity() {
- return 0;
- }
+ public BTreeFileEnlistmentOperatorNodePushable(AbstractBTreeOperatorDescriptor opDesc, IHyracksContext ctx,
+ int partition) {
+ btreeOpHelper = new BTreeOpHelper(opDesc, ctx, partition, BTreeOpHelper.BTreeMode.ENLIST_BTREE);
+ }
- @Override
- public IFrameWriter getInputFrameWriter(int index) {
- return null;
- }
+ @Override
+ public void deinitialize() throws HyracksDataException {
+ }
- @Override
- public void initialize() throws HyracksDataException {
- btreeOpHelper.init();
- }
+ @Override
+ public int getInputArity() {
+ return 0;
+ }
- @Override
- public void setOutputFrameWriter(int index, IFrameWriter writer,
- RecordDescriptor recordDesc) {
- }
+ @Override
+ public IFrameWriter getInputFrameWriter(int index) {
+ return null;
+ }
+
+ @Override
+ public void initialize() throws HyracksDataException {
+ btreeOpHelper.init();
+ btreeOpHelper.deinit();
+ }
+
+ @Override
+ public void setOutputFrameWriter(int index, IFrameWriter writer, RecordDescriptor recordDesc) {
+ }
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeInsertUpdateDeleteOperatorDescriptor.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeInsertUpdateDeleteOperatorDescriptor.java
index 7fb4398..71e1a16 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeInsertUpdateDeleteOperatorDescriptor.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeInsertUpdateDeleteOperatorDescriptor.java
@@ -27,36 +27,31 @@
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOp;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
public class BTreeInsertUpdateDeleteOperatorDescriptor extends AbstractBTreeOperatorDescriptor {
-
- private static final long serialVersionUID = 1L;
-
- private final int[] fieldPermutation;
-
- private BTreeOp op;
-
- public BTreeInsertUpdateDeleteOperatorDescriptor(JobSpecification spec,
- RecordDescriptor recDesc,
- IBufferCacheProvider bufferCacheProvider,
- IBTreeRegistryProvider btreeRegistryProvider,
- IFileSplitProvider fileSplitProvider, IFileMappingProviderProvider fileMappingProviderProvider,
- IBTreeInteriorFrameFactory interiorFactory,
- IBTreeLeafFrameFactory leafFactory, ITypeTrait[] typeTraits,
- IBinaryComparatorFactory[] comparatorFactories,
- int[] fieldPermutation, BTreeOp op) {
- super(spec, 1, 1, recDesc, bufferCacheProvider,
- btreeRegistryProvider, fileSplitProvider, fileMappingProviderProvider, interiorFactory,
- leafFactory, typeTraits, comparatorFactories);
- this.fieldPermutation = fieldPermutation;
- this.op = op;
- }
-
- @Override
- public IOperatorNodePushable createPushRuntime(IHyracksContext ctx,
- IOperatorEnvironment env,
- IRecordDescriptorProvider recordDescProvider, int partition,
- int nPartitions) {
- return new BTreeInsertUpdateDeleteOperatorNodePushable(this, ctx, partition, fieldPermutation, recordDescProvider, op);
- }
+
+ private static final long serialVersionUID = 1L;
+
+ private final int[] fieldPermutation;
+
+ private BTreeOp op;
+
+ public BTreeInsertUpdateDeleteOperatorDescriptor(JobSpecification spec, RecordDescriptor recDesc,
+ IStorageManagerInterface storageManager, IBTreeRegistryProvider btreeRegistryProvider,
+ IFileSplitProvider fileSplitProvider, IBTreeInteriorFrameFactory interiorFactory,
+ IBTreeLeafFrameFactory leafFactory, ITypeTrait[] typeTraits,
+ IBinaryComparatorFactory[] comparatorFactories, int[] fieldPermutation, BTreeOp op) {
+ super(spec, 1, 1, recDesc, storageManager, btreeRegistryProvider, fileSplitProvider, interiorFactory,
+ leafFactory, typeTraits, comparatorFactories);
+ this.fieldPermutation = fieldPermutation;
+ this.op = op;
+ }
+
+ @Override
+ public IOperatorNodePushable createPushRuntime(IHyracksContext ctx, IOperatorEnvironment env,
+ IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
+ return new BTreeInsertUpdateDeleteOperatorNodePushable(this, ctx, partition, fieldPermutation,
+ recordDescProvider, op);
+ }
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeInsertUpdateDeleteOperatorNodePushable.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeInsertUpdateDeleteOperatorNodePushable.java
index 44d26da..a4296cb 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeInsertUpdateDeleteOperatorNodePushable.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeInsertUpdateDeleteOperatorNodePushable.java
@@ -36,9 +36,9 @@
private final PermutingFrameTupleReference tuple = new PermutingFrameTupleReference();
private ByteBuffer writeBuffer;
private BTreeOpContext opCtx;
-
+
public BTreeInsertUpdateDeleteOperatorNodePushable(AbstractBTreeOperatorDescriptor opDesc, IHyracksContext ctx,
- int partition, int[] fieldPermutation, IRecordDescriptorProvider recordDescProvider, BTreeOp op) {
+ int partition, int[] fieldPermutation, IRecordDescriptorProvider recordDescProvider, BTreeOp op) {
btreeOpHelper = new BTreeOpHelper(opDesc, ctx, partition, BTreeOpHelper.BTreeMode.OPEN_BTREE);
this.recordDescProvider = recordDescProvider;
this.op = op;
@@ -46,20 +46,26 @@
}
@Override
- public void close() throws HyracksDataException {
- writer.close();
+ public void open() throws HyracksDataException {
+ AbstractBTreeOperatorDescriptor opDesc = btreeOpHelper.getOperatorDescriptor();
+ RecordDescriptor inputRecDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getOperatorId(), 0);
+ accessor = new FrameTupleAccessor(btreeOpHelper.getHyracksContext(), inputRecDesc);
+ writeBuffer = btreeOpHelper.getHyracksContext().getResourceManager().allocateFrame();
+ btreeOpHelper.init();
+ btreeOpHelper.getBTree().open(btreeOpHelper.getBTreeFileId());
+ opCtx = btreeOpHelper.getBTree().createOpContext(op, btreeOpHelper.getLeafFrame(),
+ btreeOpHelper.getInteriorFrame(), new MetaDataFrame());
}
@Override
public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
- final BTree btree = btreeOpHelper.getBTree();
+ final BTree btree = btreeOpHelper.getBTree();
accessor.reset(buffer);
-
+
int tupleCount = accessor.getTupleCount();
for (int i = 0; i < tupleCount; i++) {
tuple.reset(accessor, i);
try {
-
switch (op) {
case BTO_INSERT: {
@@ -73,7 +79,8 @@
break;
default: {
- throw new HyracksDataException("Unsupported operation " + op + " in BTree InsertUpdateDelete operator");
+ throw new HyracksDataException("Unsupported operation " + op
+ + " in BTree InsertUpdateDelete operator");
}
}
@@ -89,18 +96,12 @@
}
@Override
- public void open() throws HyracksDataException {
- AbstractBTreeOperatorDescriptor opDesc = btreeOpHelper.getOperatorDescriptor();
- RecordDescriptor inputRecDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getOperatorId(), 0);
- accessor = new FrameTupleAccessor(btreeOpHelper.getHyracksContext(), inputRecDesc);
- writeBuffer = btreeOpHelper.getHyracksContext().getResourceManager().allocateFrame();
+ public void close() throws HyracksDataException {
try {
- btreeOpHelper.init();
- btreeOpHelper.getBTree().open(btreeOpHelper.getBTreeFileId());
- } catch (Exception e) {
- e.printStackTrace();
+ writer.close();
+ } finally {
+ btreeOpHelper.deinit();
}
- opCtx = btreeOpHelper.getBTree().createOpContext(op, btreeOpHelper.getLeafFrame(), btreeOpHelper.getInteriorFrame(), new MetaDataFrame());
}
@Override
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeOpHelper.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeOpHelper.java
index 681eb9d..45d1b13 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeOpHelper.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeOpHelper.java
@@ -15,8 +15,6 @@
package edu.uci.ics.hyracks.storage.am.btree.dataflow;
import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.RandomAccessFile;
import edu.uci.ics.hyracks.api.context.IHyracksContext;
import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
@@ -28,130 +26,100 @@
import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
import edu.uci.ics.hyracks.storage.am.btree.impls.MultiComparator;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.FileInfo;
-import edu.uci.ics.hyracks.storage.common.file.FileManager;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
final class BTreeOpHelper {
-
- public enum BTreeMode {
- OPEN_BTREE,
- CREATE_BTREE,
- ENLIST_BTREE
- }
-
- private IBTreeInteriorFrame interiorFrame;
+
+ public enum BTreeMode {
+ OPEN_BTREE, CREATE_BTREE, ENLIST_BTREE
+ }
+
+ private IBTreeInteriorFrame interiorFrame;
private IBTreeLeafFrame leafFrame;
private BTree btree;
private int btreeFileId = -1;
private int partition;
-
+
private AbstractBTreeOperatorDescriptor opDesc;
private IHyracksContext ctx;
private BTreeMode mode;
-
+
BTreeOpHelper(AbstractBTreeOperatorDescriptor opDesc, final IHyracksContext ctx, int partition, BTreeMode mode) {
this.opDesc = opDesc;
this.ctx = ctx;
this.mode = mode;
this.partition = partition;
- }
-
+ }
+
void init() throws HyracksDataException {
-
- IBufferCache bufferCache = opDesc.getBufferCacheProvider().getBufferCache();
- FileManager fileManager = opDesc.getBufferCacheProvider().getFileManager();
- IFileMappingProviderProvider fileMappingProviderProvider = opDesc.getFileMappingProviderProvider();
+
+ IBufferCache bufferCache = opDesc.getStorageManager().getBufferCache();
+ IFileMapProvider fileMapProvider = opDesc.getStorageManager().getFileMapProvider();
IFileSplitProvider fileSplitProvider = opDesc.getFileSplitProvider();
-
+
File f = fileSplitProvider.getFileSplits()[partition].getLocalFile();
- if(!f.exists()) {
- File dir = new File(f.getParent());
- dir.mkdirs();
- }
- RandomAccessFile raf;
- try {
- raf = new RandomAccessFile(f, "rw");
- } catch (FileNotFoundException e) {
- throw new HyracksDataException(e);
- }
-
String fileName = f.getAbsolutePath();
- Integer fileId = fileMappingProviderProvider.getFileMappingProvider().getFileId(fileName);
-
- switch(mode) {
-
- case OPEN_BTREE: {
- if(fileId == null) {
- throw new HyracksDataException("Cannot get id for file " + fileName + ". File name has not been mapped.");
- }
- if(!f.exists()) {
- throw new HyracksDataException("Trying to open btree from file " + fileName + " but file doesn't exist.");
- }
- } break;
-
- case CREATE_BTREE: {
- if(fileId == null) {
- fileId = fileMappingProviderProvider.getFileMappingProvider().mapNameToFileId(fileName, true);
- }
- else {
- throw new HyracksDataException("Cannot map file " + fileName + " to an id. File name has already been mapped.");
- }
- } break;
-
- case ENLIST_BTREE: {
- if(fileId == null) {
- fileId = fileMappingProviderProvider.getFileMappingProvider().mapNameToFileId(fileName, true);
- }
- else {
- throw new HyracksDataException("Cannot map file " + fileName + " to an id. File name has already been mapped.");
- }
- if(!f.exists()) {
- throw new HyracksDataException("Trying to enlist btree from file " + fileName + " but file doesn't exist.");
- }
- } break;
+ boolean fileIsMapped = fileMapProvider.isMapped(fileName);
+
+ switch (mode) {
+
+ case OPEN_BTREE: {
+ if (!fileIsMapped) {
+ bufferCache.createFile(fileName);
+ // throw new
+ // HyracksDataException("Trying to open btree from unmapped file "
+ // + fileName);
+ }
+ }
+ break;
+
+ case CREATE_BTREE:
+ case ENLIST_BTREE: {
+ if (!fileIsMapped) {
+ bufferCache.createFile(fileName);
+ }
+ }
+ break;
+
}
-
- btreeFileId = fileId;
-
- if(mode == BTreeMode.CREATE_BTREE || mode == BTreeMode.ENLIST_BTREE) {
- FileInfo fi = new FileInfo(btreeFileId, raf);
- fileManager.registerFile(fi);
- }
-
+
+ btreeFileId = fileMapProvider.lookupFileId(fileName);
+ bufferCache.openFile(btreeFileId);
+
interiorFrame = opDesc.getInteriorFactory().getFrame();
leafFrame = opDesc.getLeafFactory().getFrame();
BTreeRegistry btreeRegistry = opDesc.getBtreeRegistryProvider().getBTreeRegistry();
btree = btreeRegistry.get(btreeFileId);
if (btree == null) {
-
- // create new btree and register it
+
+ // create new btree and register it
btreeRegistry.lock();
try {
// check if btree has already been registered by another thread
btree = btreeRegistry.get(btreeFileId);
if (btree == null) {
// this thread should create and register the btree
-
+
IBinaryComparator[] comparators = new IBinaryComparator[opDesc.getComparatorFactories().length];
for (int i = 0; i < opDesc.getComparatorFactories().length; i++) {
comparators[i] = opDesc.getComparatorFactories()[i].createBinaryComparator();
}
-
+
MultiComparator cmp = new MultiComparator(opDesc.getTypeTraits(), comparators);
-
+
btree = new BTree(bufferCache, opDesc.getInteriorFactory(), opDesc.getLeafFactory(), cmp);
if (mode == BTreeMode.CREATE_BTREE) {
MetaDataFrame metaFrame = new MetaDataFrame();
try {
- btree.create(btreeFileId, leafFrame, metaFrame);
- } catch (Exception e) {
- throw new HyracksDataException(e);
- }
+ btree.create(btreeFileId, leafFrame, metaFrame);
+ btree.open(btreeFileId);
+ } catch (Exception e) {
+ throw new HyracksDataException(e);
+ }
}
- btree.open(btreeFileId);
btreeRegistry.register(btreeFileId, btree);
}
} finally {
@@ -159,7 +127,14 @@
}
}
}
-
+
+ public void deinit() throws HyracksDataException {
+ if (btreeFileId != -1) {
+ IBufferCache bufferCache = opDesc.getStorageManager().getBufferCache();
+ bufferCache.closeFile(btreeFileId);
+ }
+ }
+
public BTree getBTree() {
return btree;
}
@@ -179,8 +154,8 @@
public IBTreeInteriorFrame getInteriorFrame() {
return interiorFrame;
}
-
+
public int getBTreeFileId() {
- return btreeFileId;
+ return btreeFileId;
}
}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeRegistry.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeRegistry.java
index 61491b1..9b90e22 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeRegistry.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeRegistry.java
@@ -22,36 +22,35 @@
import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
public class BTreeRegistry {
-
+
private HashMap<Integer, BTree> map = new HashMap<Integer, BTree>();
private Lock registryLock = new ReentrantLock();
-
+
public BTree get(int fileId) {
- return map.get(fileId);
+ return map.get(fileId);
}
-
- // TODO: not very high concurrency, but good enough for now
- public void lock() {
- registryLock.lock();
+
+ // TODO: not very high concurrency, but good enough for now
+ public void lock() {
+ registryLock.lock();
}
-
+
public void unlock() {
registryLock.unlock();
}
-
+
public void register(int fileId, BTree btree) {
map.put(fileId, btree);
}
-
+
public void unregister(int fileId) {
try {
- map.remove(fileId);
- }
- catch (Exception e) {
+ map.remove(fileId);
+ } catch (Exception e) {
}
- }
-
+ }
+
public int size() {
- return map.size();
+ return map.size();
}
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeRegistryProvider.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeRegistryProvider.java
index 677d4d4..ef4e85c 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeRegistryProvider.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeRegistryProvider.java
@@ -16,16 +16,16 @@
package edu.uci.ics.hyracks.storage.am.btree.dataflow;
public class BTreeRegistryProvider implements IBTreeRegistryProvider {
-
- private static final long serialVersionUID = 1L;
-
- private static BTreeRegistry btreeRegistry = null;
-
- @Override
- public synchronized BTreeRegistry getBTreeRegistry() {
- if(btreeRegistry == null) {
- btreeRegistry = new BTreeRegistry();
- }
- return btreeRegistry;
- }
+
+ private static final long serialVersionUID = 1L;
+
+ private static BTreeRegistry btreeRegistry = null;
+
+ @Override
+ public synchronized BTreeRegistry getBTreeRegistry() {
+ if (btreeRegistry == null) {
+ btreeRegistry = new BTreeRegistry();
+ }
+ return btreeRegistry;
+ }
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorDescriptor.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorDescriptor.java
index 6b0e4de..dd67762 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorDescriptor.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorDescriptor.java
@@ -26,29 +26,38 @@
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
public class BTreeSearchOperatorDescriptor extends AbstractBTreeOperatorDescriptor {
-
- private static final long serialVersionUID = 1L;
- private boolean isForward;
- private int[] lowKeyFields; // fields in input tuple to be used as low keys
- private int[] highKeyFields; // fields in input tuple to be used as high keys
- private boolean lowKeyInclusive;
+ private static final long serialVersionUID = 1L;
+
+ private boolean isForward;
+ private int[] lowKeyFields; // fields in input tuple to be used as low keys
+ private int[] highKeyFields; // fields in input tuple to be used as high
+ // keys
+ private boolean lowKeyInclusive;
private boolean highKeyInclusive;
-
- public BTreeSearchOperatorDescriptor(JobSpecification spec, RecordDescriptor recDesc, IBufferCacheProvider bufferCacheProvider, IBTreeRegistryProvider btreeRegistryProvider, IFileSplitProvider fileSplitProvider, IFileMappingProviderProvider fileMappingProviderProvider, IBTreeInteriorFrameFactory interiorFactory, IBTreeLeafFrameFactory leafFactory, ITypeTrait[] typeTraits, IBinaryComparatorFactory[] comparatorFactories, boolean isForward, int[] lowKeyFields, int[] highKeyFields, boolean lowKeyInclusive, boolean highKeyInclusive) {
- super(spec, 1, 1, recDesc, bufferCacheProvider, btreeRegistryProvider, fileSplitProvider, fileMappingProviderProvider, interiorFactory, leafFactory, typeTraits, comparatorFactories);
- this.isForward = isForward;
- this.lowKeyFields = lowKeyFields;
- this.highKeyFields = highKeyFields;
- this.lowKeyInclusive = lowKeyInclusive;
- this.highKeyInclusive = highKeyInclusive;
- }
-
- @Override
- public IOperatorNodePushable createPushRuntime(final IHyracksContext ctx, final IOperatorEnvironment env,
- IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
- return new BTreeSearchOperatorNodePushable(this, ctx, partition, recordDescProvider, isForward, lowKeyFields, highKeyFields, lowKeyInclusive, highKeyInclusive);
- }
+
+ public BTreeSearchOperatorDescriptor(JobSpecification spec, RecordDescriptor recDesc,
+ IStorageManagerInterface storageManager, IBTreeRegistryProvider btreeRegistryProvider,
+ IFileSplitProvider fileSplitProvider, IBTreeInteriorFrameFactory interiorFactory,
+ IBTreeLeafFrameFactory leafFactory, ITypeTrait[] typeTraits,
+ IBinaryComparatorFactory[] comparatorFactories, boolean isForward, int[] lowKeyFields, int[] highKeyFields,
+ boolean lowKeyInclusive, boolean highKeyInclusive) {
+ super(spec, 1, 1, recDesc, storageManager, btreeRegistryProvider, fileSplitProvider, interiorFactory,
+ leafFactory, typeTraits, comparatorFactories);
+ this.isForward = isForward;
+ this.lowKeyFields = lowKeyFields;
+ this.highKeyFields = highKeyFields;
+ this.lowKeyInclusive = lowKeyInclusive;
+ this.highKeyInclusive = highKeyInclusive;
+ }
+
+ @Override
+ public IOperatorNodePushable createPushRuntime(final IHyracksContext ctx, final IOperatorEnvironment env,
+ IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
+ return new BTreeSearchOperatorNodePushable(this, ctx, partition, recordDescProvider, isForward, lowKeyFields,
+ highKeyFields, lowKeyInclusive, highKeyInclusive);
+ }
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorNodePushable.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorNodePushable.java
index 052d7e1..d06e2ce 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorNodePushable.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorNodePushable.java
@@ -38,157 +38,164 @@
import edu.uci.ics.hyracks.storage.am.btree.impls.RangeSearchCursor;
public class BTreeSearchOperatorNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
- private BTreeOpHelper btreeOpHelper;
- private FrameTupleAccessor accessor;
-
+ private BTreeOpHelper btreeOpHelper;
+ private FrameTupleAccessor accessor;
+
private ByteBuffer writeBuffer;
private FrameTupleAppender appender;
private ArrayTupleBuilder tb;
private DataOutput dos;
-
- private BTree btree;
+
+ private BTree btree;
private boolean isForward;
- private PermutingFrameTupleReference lowKey;
- private PermutingFrameTupleReference highKey;
+ private PermutingFrameTupleReference lowKey;
+ private PermutingFrameTupleReference highKey;
private boolean lowKeyInclusive;
private boolean highKeyInclusive;
private RangePredicate rangePred;
private MultiComparator lowKeySearchCmp;
private MultiComparator highKeySearchCmp;
- private IBTreeCursor cursor;
- private IBTreeLeafFrame cursorFrame;
+ private IBTreeCursor cursor;
+ private IBTreeLeafFrame cursorFrame;
private BTreeOpContext opCtx;
-
- private RecordDescriptor recDesc;
-
- public BTreeSearchOperatorNodePushable(AbstractBTreeOperatorDescriptor opDesc, IHyracksContext ctx, int partition, IRecordDescriptorProvider recordDescProvider, boolean isForward, int[] lowKeyFields, int[] highKeyFields, boolean lowKeyInclusive, boolean highKeyInclusive) {
+
+ private RecordDescriptor recDesc;
+
+ public BTreeSearchOperatorNodePushable(AbstractBTreeOperatorDescriptor opDesc, IHyracksContext ctx, int partition,
+ IRecordDescriptorProvider recordDescProvider, boolean isForward, int[] lowKeyFields, int[] highKeyFields,
+ boolean lowKeyInclusive, boolean highKeyInclusive) {
btreeOpHelper = new BTreeOpHelper(opDesc, ctx, partition, BTreeOpHelper.BTreeMode.OPEN_BTREE);
this.isForward = isForward;
this.lowKeyInclusive = lowKeyInclusive;
this.highKeyInclusive = highKeyInclusive;
- this.recDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getOperatorId(), 0);
- if(lowKeyFields != null && lowKeyFields.length > 0) {
- lowKey = new PermutingFrameTupleReference();
- lowKey.setFieldPermutation(lowKeyFields);
+ this.recDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getOperatorId(), 0);
+ if (lowKeyFields != null && lowKeyFields.length > 0) {
+ lowKey = new PermutingFrameTupleReference();
+ lowKey.setFieldPermutation(lowKeyFields);
}
- if(highKeyFields != null && highKeyFields.length > 0) {
- highKey = new PermutingFrameTupleReference();
- highKey.setFieldPermutation(highKeyFields);
+ if (highKeyFields != null && highKeyFields.length > 0) {
+ highKey = new PermutingFrameTupleReference();
+ highKey.setFieldPermutation(highKeyFields);
}
}
-
+
@Override
- public void open() throws HyracksDataException {
- AbstractBTreeOperatorDescriptor opDesc = btreeOpHelper.getOperatorDescriptor();
+ public void open() throws HyracksDataException {
+ AbstractBTreeOperatorDescriptor opDesc = btreeOpHelper.getOperatorDescriptor();
accessor = new FrameTupleAccessor(btreeOpHelper.getHyracksContext(), recDesc);
-
+
cursorFrame = opDesc.getLeafFactory().getFrame();
cursor = new RangeSearchCursor(cursorFrame);
-
- try {
- btreeOpHelper.init();
- } catch (Exception e) {
- throw new HyracksDataException(e);
- }
+
+ btreeOpHelper.init();
btree = btreeOpHelper.getBTree();
-
+
// construct range predicate
-
+
int lowKeySearchFields = btree.getMultiComparator().getComparators().length;
int highKeySearchFields = btree.getMultiComparator().getComparators().length;
- if(lowKey != null) lowKeySearchFields = lowKey.getFieldCount();
- if(highKey != null) highKeySearchFields = highKey.getFieldCount();
-
+ if (lowKey != null)
+ lowKeySearchFields = lowKey.getFieldCount();
+ if (highKey != null)
+ highKeySearchFields = highKey.getFieldCount();
+
IBinaryComparator[] lowKeySearchComparators = new IBinaryComparator[lowKeySearchFields];
for (int i = 0; i < lowKeySearchFields; i++) {
- lowKeySearchComparators[i] = btree.getMultiComparator().getComparators()[i];
+ lowKeySearchComparators[i] = btree.getMultiComparator().getComparators()[i];
}
lowKeySearchCmp = new MultiComparator(btree.getMultiComparator().getTypeTraits(), lowKeySearchComparators);
-
- if(lowKeySearchFields == highKeySearchFields) {
- highKeySearchCmp = lowKeySearchCmp;
- }
- else {
- IBinaryComparator[] highKeySearchComparators = new IBinaryComparator[highKeySearchFields];
+
+ if (lowKeySearchFields == highKeySearchFields) {
+ highKeySearchCmp = lowKeySearchCmp;
+ } else {
+ IBinaryComparator[] highKeySearchComparators = new IBinaryComparator[highKeySearchFields];
for (int i = 0; i < highKeySearchFields; i++) {
- highKeySearchComparators[i] = btree.getMultiComparator().getComparators()[i];
+ highKeySearchComparators[i] = btree.getMultiComparator().getComparators()[i];
}
highKeySearchCmp = new MultiComparator(btree.getMultiComparator().getTypeTraits(), highKeySearchComparators);
-
+
}
-
- rangePred = new RangePredicate(isForward, null, null, lowKeyInclusive, highKeyInclusive, lowKeySearchCmp, highKeySearchCmp);
-
+
+ rangePred = new RangePredicate(isForward, null, null, lowKeyInclusive, highKeyInclusive, lowKeySearchCmp,
+ highKeySearchCmp);
+
accessor = new FrameTupleAccessor(btreeOpHelper.getHyracksContext(), recDesc);
-
+
writeBuffer = btreeOpHelper.getHyracksContext().getResourceManager().allocateFrame();
tb = new ArrayTupleBuilder(btree.getMultiComparator().getFieldCount());
dos = tb.getDataOutput();
- appender = new FrameTupleAppender(btreeOpHelper.getHyracksContext());
+ appender = new FrameTupleAppender(btreeOpHelper.getHyracksContext());
appender.reset(writeBuffer, true);
-
- opCtx = btree.createOpContext(BTreeOp.BTO_SEARCH, btreeOpHelper.getLeafFrame(), btreeOpHelper.getInteriorFrame(), null);
+
+ opCtx = btree.createOpContext(BTreeOp.BTO_SEARCH, btreeOpHelper.getLeafFrame(), btreeOpHelper
+ .getInteriorFrame(), null);
}
-
+
private void writeSearchResults() throws Exception {
- while (cursor.hasNext()) {
- tb.reset();
- cursor.next();
+ while (cursor.hasNext()) {
+ tb.reset();
+ cursor.next();
- ITupleReference frameTuple = cursor.getTuple();
- for (int i = 0; i < frameTuple.getFieldCount(); i++) {
- dos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
- tb.addFieldEndOffset();
- }
+ ITupleReference frameTuple = cursor.getTuple();
+ for (int i = 0; i < frameTuple.getFieldCount(); i++) {
+ dos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
+ tb.addFieldEndOffset();
+ }
- if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
- FrameUtils.flushFrame(writeBuffer, writer);
- appender.reset(writeBuffer, true);
- if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
- throw new IllegalStateException();
- }
- }
- }
+ if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+ FrameUtils.flushFrame(writeBuffer, writer);
+ appender.reset(writeBuffer, true);
+ if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+ throw new IllegalStateException();
+ }
+ }
+ }
}
-
- @Override
- public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
- accessor.reset(buffer);
-
- int tupleCount = accessor.getTupleCount();
- try {
- for (int i = 0; i < tupleCount; i++) {
- if(lowKey != null) lowKey.reset(accessor, i);
- if(highKey != null) highKey.reset(accessor, i);
+
+ @Override
+ public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+ accessor.reset(buffer);
+
+ int tupleCount = accessor.getTupleCount();
+ try {
+ for (int i = 0; i < tupleCount; i++) {
+ if (lowKey != null)
+ lowKey.reset(accessor, i);
+ if (highKey != null)
+ highKey.reset(accessor, i);
rangePred.setLowKey(lowKey, lowKeyInclusive);
rangePred.setHighKey(highKey, highKeyInclusive);
-
- cursor.reset();
- btree.search(cursor, rangePred, opCtx);
- writeSearchResults();
- }
- } catch (Exception e) {
- throw new HyracksDataException(e);
- }
- }
-
- @Override
- public void close() throws HyracksDataException {
- if (appender.getTupleCount() > 0) {
- FrameUtils.flushFrame(writeBuffer, writer);
- }
- writer.close();
- try {
- cursor.close();
- } catch (Exception e) {
- throw new HyracksDataException(e);
- }
- }
- @Override
- public void flush() throws HyracksDataException {
- if (appender.getTupleCount() > 0) {
- FrameUtils.flushFrame(writeBuffer, writer);
- }
- }
+ cursor.reset();
+ btree.search(cursor, rangePred, opCtx);
+ writeSearchResults();
+ }
+ } catch (Exception e) {
+ throw new HyracksDataException(e);
+ }
+ }
+
+ @Override
+ public void close() throws HyracksDataException {
+ try {
+ if (appender.getTupleCount() > 0) {
+ FrameUtils.flushFrame(writeBuffer, writer);
+ }
+ writer.close();
+ try {
+ cursor.close();
+ } catch (Exception e) {
+ throw new HyracksDataException(e);
+ }
+ } finally {
+ btreeOpHelper.deinit();
+ }
+ }
+
+ @Override
+ public void flush() throws HyracksDataException {
+ if (appender.getTupleCount() > 0) {
+ FrameUtils.flushFrame(writeBuffer, writer);
+ }
+ }
}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BufferCacheProvider.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BufferCacheProvider.java
deleted file mode 100644
index 289d99a..0000000
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BufferCacheProvider.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.dataflow;
-
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.storage.common.buffercache.BufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ClockPageReplacementStrategy;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
-import edu.uci.ics.hyracks.storage.common.buffercache.IPageReplacementStrategy;
-import edu.uci.ics.hyracks.storage.common.file.FileManager;
-
-public class BufferCacheProvider implements IBufferCacheProvider {
-
- private static final long serialVersionUID = 1L;
-
- private static IBufferCache bufferCache = null;
- private static FileManager fileManager = null;
- private static final int PAGE_SIZE = 8192;
- private static final int NUM_PAGES = 40;
-
- public void reset() {
- bufferCache = null;
- fileManager = null;
- }
-
- @Override
- public synchronized IBufferCache getBufferCache() {
-
- if(bufferCache == null) {
- if(fileManager == null) fileManager = new FileManager();
- ICacheMemoryAllocator allocator = new BufferAllocator();
- IPageReplacementStrategy prs = new ClockPageReplacementStrategy();
- bufferCache = new BufferCache(allocator, prs, fileManager, PAGE_SIZE, NUM_PAGES);
- }
-
- return bufferCache;
- }
-
- @Override
- public synchronized FileManager getFileManager() {
- if(fileManager == null) fileManager = new FileManager();
- return fileManager;
- }
-
- public class BufferAllocator implements ICacheMemoryAllocator {
- @Override
- public ByteBuffer[] allocate(int pageSize, int numPages) {
- ByteBuffer[] buffers = new ByteBuffer[numPages];
- for (int i = 0; i < numPages; ++i) {
- buffers[i] = ByteBuffer.allocate(pageSize);
- }
- return buffers;
- }
- }
-}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/ConstantTupleSourceOperatorDescriptor.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/ConstantTupleSourceOperatorDescriptor.java
index 9ef860b..10a3252 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/ConstantTupleSourceOperatorDescriptor.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/ConstantTupleSourceOperatorDescriptor.java
@@ -1,3 +1,17 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package edu.uci.ics.hyracks.storage.am.btree.dataflow;
import edu.uci.ics.hyracks.api.context.IHyracksContext;
@@ -10,27 +24,26 @@
import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
public class ConstantTupleSourceOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
-
- private static final long serialVersionUID = 1L;
-
- private int[] fieldSlots;
- private byte[] tupleData;
- private int tupleSize;
-
- public ConstantTupleSourceOperatorDescriptor(JobSpecification spec, RecordDescriptor recDesc, int[] fieldSlots, byte[] tupleData, int tupleSize) {
- super(spec, 0, 1);
- this.tupleData = tupleData;
- this.fieldSlots = fieldSlots;
- this.tupleSize = tupleSize;
- recordDescriptors[0] = recDesc;
- }
-
- @Override
- public IOperatorNodePushable createPushRuntime(IHyracksContext ctx,
- IOperatorEnvironment env,
- IRecordDescriptorProvider recordDescProvider, int partition,
- int nPartitions) throws HyracksDataException {
- return new ConstantTupleSourceOperatorNodePushable(ctx, fieldSlots, tupleData, tupleSize);
- }
+
+ private static final long serialVersionUID = 1L;
+
+ private int[] fieldSlots;
+ private byte[] tupleData;
+ private int tupleSize;
+
+ public ConstantTupleSourceOperatorDescriptor(JobSpecification spec, RecordDescriptor recDesc, int[] fieldSlots,
+ byte[] tupleData, int tupleSize) {
+ super(spec, 0, 1);
+ this.tupleData = tupleData;
+ this.fieldSlots = fieldSlots;
+ this.tupleSize = tupleSize;
+ recordDescriptors[0] = recDesc;
+ }
+
+ @Override
+ public IOperatorNodePushable createPushRuntime(IHyracksContext ctx, IOperatorEnvironment env,
+ IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
+ return new ConstantTupleSourceOperatorNodePushable(ctx, fieldSlots, tupleData, tupleSize);
+ }
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/ConstantTupleSourceOperatorNodePushable.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/ConstantTupleSourceOperatorNodePushable.java
index e02a37c..775c9aa 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/ConstantTupleSourceOperatorNodePushable.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/ConstantTupleSourceOperatorNodePushable.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.btree.dataflow;
import java.nio.ByteBuffer;
@@ -9,30 +24,30 @@
import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
public class ConstantTupleSourceOperatorNodePushable extends AbstractUnaryOutputSourceOperatorNodePushable {
-
- private IHyracksContext ctx;
-
- private int[] fieldSlots;
- private byte[] tupleData;
- private int tupleSize;
-
-
- public ConstantTupleSourceOperatorNodePushable(IHyracksContext ctx, int[] fieldSlots, byte[] tupleData, int tupleSize) {
- super();
- this.fieldSlots = fieldSlots;
- this.tupleData = tupleData;
- this.tupleSize = tupleSize;
- this.ctx = ctx;
- }
-
- @Override
- public void initialize() throws HyracksDataException {
- ByteBuffer writeBuffer = ctx.getResourceManager().allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx);
- appender.reset(writeBuffer, true);
- if(fieldSlots != null && tupleData != null && tupleSize > 0)
- appender.append(fieldSlots, tupleData, 0, tupleSize);
- FrameUtils.flushFrame(writeBuffer, writer);
- writer.close();
- }
+
+ private IHyracksContext ctx;
+
+ private int[] fieldSlots;
+ private byte[] tupleData;
+ private int tupleSize;
+
+ public ConstantTupleSourceOperatorNodePushable(IHyracksContext ctx, int[] fieldSlots, byte[] tupleData,
+ int tupleSize) {
+ super();
+ this.fieldSlots = fieldSlots;
+ this.tupleData = tupleData;
+ this.tupleSize = tupleSize;
+ this.ctx = ctx;
+ }
+
+ @Override
+ public void initialize() throws HyracksDataException {
+ ByteBuffer writeBuffer = ctx.getResourceManager().allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx);
+ appender.reset(writeBuffer, true);
+ if (fieldSlots != null && tupleData != null && tupleSize > 0)
+ appender.append(fieldSlots, tupleData, 0, tupleSize);
+ FrameUtils.flushFrame(writeBuffer, writer);
+ writer.close();
+ }
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/FileMappingProvider.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/FileMappingProvider.java
deleted file mode 100644
index f4a68bf..0000000
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/FileMappingProvider.java
+++ /dev/null
@@ -1,41 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.btree.dataflow;
-
-import java.util.Hashtable;
-import java.util.Map;
-
-import edu.uci.ics.hyracks.storage.common.file.IFileMappingProvider;
-
-public class FileMappingProvider implements IFileMappingProvider {
-
- private static final long serialVersionUID = 1L;
- private int nextFileId = 0;
- private Map<String, Integer> map = new Hashtable<String, Integer>();
-
- @Override
- public synchronized Integer mapNameToFileId(String name, boolean create) {
- Integer val = map.get(name);
- if(create) {
- if(val == null) {
- int ret = nextFileId;
- map.put(name, nextFileId++);
- return ret;
- }
- else {
- return null; // create requested but value already exists
- }
- }
- else {
- return val; // just return value
- }
- }
-
- @Override
- public void unmapName(String name) {
- map.remove(name);
- }
-
- @Override
- public Integer getFileId(String name) {
- return map.get(name);
- }
-}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/FileMappingProviderProvider.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/FileMappingProviderProvider.java
deleted file mode 100644
index edf1898..0000000
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/FileMappingProviderProvider.java
+++ /dev/null
@@ -1,17 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.btree.dataflow;
-
-import edu.uci.ics.hyracks.storage.common.file.IFileMappingProvider;
-
-public class FileMappingProviderProvider implements IFileMappingProviderProvider {
- private static final long serialVersionUID = 1L;
-
- private static IFileMappingProvider fileMappingProvider = null;
-
- @Override
- public IFileMappingProvider getFileMappingProvider() {
- if(fileMappingProvider == null) {
- fileMappingProvider = new FileMappingProvider();
- }
- return fileMappingProvider;
- }
-}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/HyracksSimpleStorageManagerInterface.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/HyracksSimpleStorageManagerInterface.java
new file mode 100644
index 0000000..12538d2
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/HyracksSimpleStorageManagerInterface.java
@@ -0,0 +1,110 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.dataflow;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+import edu.uci.ics.hyracks.storage.common.buffercache.BufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ClockPageReplacementStrategy;
+import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IPageReplacementStrategy;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapManager;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+
+public class HyracksSimpleStorageManagerInterface implements IStorageManagerInterface {
+ private static final long serialVersionUID = 1L;
+
+ private static transient IBufferCache bufferCache = null;
+ private static transient IFileMapManager fmManager;
+ private int PAGE_SIZE = 8192;
+ private int NUM_PAGES = 40;
+
+ public HyracksSimpleStorageManagerInterface() {
+ init();
+ }
+
+ public HyracksSimpleStorageManagerInterface(int pageSize, int numPages) {
+ PAGE_SIZE = pageSize;
+ NUM_PAGES = numPages;
+ init();
+ }
+
+ private void init() {
+ ICacheMemoryAllocator allocator = new HeapBufferAllocator();
+ IPageReplacementStrategy prs = new ClockPageReplacementStrategy();
+ fmManager = new IFileMapManager() {
+ private Map<Integer, String> id2nameMap = new HashMap<Integer, String>();
+ private Map<String, Integer> name2IdMap = new HashMap<String, Integer>();
+ private int idCounter = 0;
+
+ @Override
+ public String lookupFileName(int fileId) throws HyracksDataException {
+ String fName = id2nameMap.get(fileId);
+ if (fName == null) {
+ throw new HyracksDataException("No mapping found for id: " + fileId);
+ }
+ return fName;
+ }
+
+ @Override
+ public int lookupFileId(String fileName) throws HyracksDataException {
+ Integer fileId = name2IdMap.get(fileName);
+ if (fileId == null) {
+ throw new HyracksDataException("No mapping found for name: " + fileName);
+ }
+ return fileId;
+ }
+
+ @Override
+ public boolean isMapped(String fileName) {
+ return name2IdMap.containsKey(fileName);
+ }
+
+ @Override
+ public boolean isMapped(int fileId) {
+ return id2nameMap.containsKey(fileId);
+ }
+
+ @Override
+ public void unregisterFile(int fileId) throws HyracksDataException {
+ String fileName = id2nameMap.remove(fileId);
+ name2IdMap.remove(fileName);
+ }
+
+ @Override
+ public void registerFile(String fileName) throws HyracksDataException {
+ Integer fileId = idCounter++;
+ id2nameMap.put(fileId, fileName);
+ name2IdMap.put(fileName, fileId);
+ }
+ };
+ bufferCache = new BufferCache(allocator, prs, fmManager, PAGE_SIZE, NUM_PAGES);
+ }
+
+ public IFileMapProvider getFileMapProvider() {
+ return fmManager;
+ }
+
+ @Override
+ public IBufferCache getBufferCache() {
+ return bufferCache;
+ }
+}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/IBTreeOperatorDescriptor.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/IBTreeOperatorDescriptor.java
deleted file mode 100644
index 707d20e..0000000
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/IBTreeOperatorDescriptor.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.dataflow;
-
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.impls.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-
-public interface IBTreeOperatorDescriptor {
- public String getBTreeFileName();
- public int getBTreeFileId();
-
- public MultiComparator getMultiComparator();
- public RangePredicate getRangePred();
-
- public IBTreeInteriorFrameFactory getInteriorFactory();
- public IBTreeLeafFrameFactory getLeafFactory();
-
- public IBufferCacheProvider getBufferCacheProvider();
- public IBTreeRegistryProvider getBTreeRegistryProvider();
-}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/IBTreeRegistryProvider.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/IBTreeRegistryProvider.java
index 128af09..1f0f1ef 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/IBTreeRegistryProvider.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/IBTreeRegistryProvider.java
@@ -18,5 +18,5 @@
import java.io.Serializable;
public interface IBTreeRegistryProvider extends Serializable {
- public BTreeRegistry getBTreeRegistry();
+ public BTreeRegistry getBTreeRegistry();
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/IFileMappingProviderProvider.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/IFileMappingProviderProvider.java
deleted file mode 100644
index b88e058..0000000
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/IFileMappingProviderProvider.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.btree.dataflow;
-
-import java.io.Serializable;
-
-import edu.uci.ics.hyracks.storage.common.file.IFileMappingProvider;
-
-public interface IFileMappingProviderProvider extends Serializable {
- public IFileMappingProvider getFileMappingProvider();
-}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/PermutingFrameTupleReference.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/PermutingFrameTupleReference.java
index db74a9c..a3f343a 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/PermutingFrameTupleReference.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/PermutingFrameTupleReference.java
@@ -1,17 +1,32 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.btree.dataflow;
import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
public class PermutingFrameTupleReference implements IFrameTupleReference {
- private IFrameTupleAccessor fta;
+ private IFrameTupleAccessor fta;
private int tIndex;
private int[] fieldPermutation;
-
+
public void setFieldPermutation(int[] fieldPermutation) {
- this.fieldPermutation = fieldPermutation;
+ this.fieldPermutation = fieldPermutation;
}
-
+
public void reset(IFrameTupleAccessor fta, int tIndex) {
this.fta = fta;
this.tIndex = tIndex;
@@ -38,8 +53,9 @@
}
@Override
- public int getFieldStart(int fIdx) {
- return fta.getTupleStartOffset(tIndex) + fta.getFieldSlotsLength() + fta.getFieldStartOffset(tIndex, fieldPermutation[fIdx]);
+ public int getFieldStart(int fIdx) {
+ return fta.getTupleStartOffset(tIndex) + fta.getFieldSlotsLength()
+ + fta.getFieldStartOffset(tIndex, fieldPermutation[fIdx]);
}
@Override
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/FieldPrefixNSMLeafFrame.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/FieldPrefixNSMLeafFrame.java
index 65327ae..6eeb5e1 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/FieldPrefixNSMLeafFrame.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/FieldPrefixNSMLeafFrame.java
@@ -50,61 +50,66 @@
// WARNING: only works when tupleWriter is an instance of TypeAwareTupleWriter
public class FieldPrefixNSMLeafFrame implements IBTreeLeafFrame {
-
- protected static final int pageLsnOff = 0; // 0
- protected static final int tupleCountOff = pageLsnOff + 4; // 4
- protected static final int freeSpaceOff = tupleCountOff + 4; // 8
- protected static final int totalFreeSpaceOff = freeSpaceOff + 4; // 12
- protected static final int levelOff = totalFreeSpaceOff + 4; // 16
- protected static final int smFlagOff = levelOff + 1; // 17
- protected static final int uncompressedTupleCountOff = smFlagOff + 1; // 18
- protected static final int prefixTupleCountOff = uncompressedTupleCountOff + 4; // 21
-
- protected static final int prevLeafOff = prefixTupleCountOff + 4; // 22
- protected static final int nextLeafOff = prevLeafOff + 4; // 26
-
- protected ICachedPage page = null;
+
+ protected static final int pageLsnOff = 0; // 0
+ protected static final int tupleCountOff = pageLsnOff + 4; // 4
+ protected static final int freeSpaceOff = tupleCountOff + 4; // 8
+ protected static final int totalFreeSpaceOff = freeSpaceOff + 4; // 12
+ protected static final int levelOff = totalFreeSpaceOff + 4; // 16
+ protected static final int smFlagOff = levelOff + 1; // 17
+ protected static final int uncompressedTupleCountOff = smFlagOff + 1; // 18
+ protected static final int prefixTupleCountOff = uncompressedTupleCountOff + 4; // 21
+
+ protected static final int prevLeafOff = prefixTupleCountOff + 4; // 22
+ protected static final int nextLeafOff = prevLeafOff + 4; // 26
+
+ protected ICachedPage page = null;
protected ByteBuffer buf = null;
public IFrameCompressor compressor;
- public IPrefixSlotManager slotManager; // TODO: should be protected, but will trigger some refactoring
-
+ public IPrefixSlotManager slotManager; // TODO: should be protected, but
+ // will trigger some refactoring
+
private IBTreeTupleWriter tupleWriter;
-
- private FieldPrefixTupleReference frameTuple;
+
+ private FieldPrefixTupleReference frameTuple;
private FieldPrefixPrefixTupleReference framePrefixTuple;
-
+
public FieldPrefixNSMLeafFrame(IBTreeTupleWriter tupleWriter) {
- this.tupleWriter = tupleWriter;
- this.frameTuple = new FieldPrefixTupleReference(tupleWriter.createTupleReference());
- ITypeTrait[] typeTraits = ((TypeAwareTupleWriter)tupleWriter).getTypeTraits();
- this.framePrefixTuple = new FieldPrefixPrefixTupleReference(typeTraits);
- this.slotManager = new FieldPrefixSlotManager();
- this.compressor = new FieldPrefixCompressor(typeTraits, 0.001f, 2);
+ this.tupleWriter = tupleWriter;
+ this.frameTuple = new FieldPrefixTupleReference(tupleWriter.createTupleReference());
+ ITypeTrait[] typeTraits = ((TypeAwareTupleWriter) tupleWriter).getTypeTraits();
+ this.framePrefixTuple = new FieldPrefixPrefixTupleReference(typeTraits);
+ this.slotManager = new FieldPrefixSlotManager();
+ this.compressor = new FieldPrefixCompressor(typeTraits, 0.001f, 2);
}
-
+
@Override
public void setPage(ICachedPage page) {
this.page = page;
this.buf = page.getBuffer();
slotManager.setFrame(this);
}
-
+
@Override
public ByteBuffer getBuffer() {
return page.getBuffer();
}
-
+
@Override
public ICachedPage getPage() {
return page;
}
-
+
@Override
- public boolean compress(MultiComparator cmp) throws Exception {
- return compressor.compress(this, cmp);
+ public boolean compress(MultiComparator cmp) throws HyracksDataException {
+ try {
+ return compressor.compress(this, cmp);
+ } catch (Exception e) {
+ throw new HyracksDataException(e);
+ }
}
-
- // assumptions:
+
+ // assumptions:
// 1. prefix tuple are stored contiguously
// 2. prefix tuple are located before tuples (physically on the page)
// 3. prefix tuple are sorted (last prefix tuple is at highest offset)
@@ -112,511 +117,545 @@
@Override
public void compact(MultiComparator cmp) {
resetSpaceParams();
-
+
frameTuple.setFieldCount(cmp.getFieldCount());
-
+
int tupleCount = buf.getInt(tupleCountOff);
-
- // determine start of target free space (depends on assumptions stated above)
- int freeSpace = buf.getInt(freeSpaceOff);
+
+ // determine start of target free space (depends on assumptions stated
+ // above)
+ int freeSpace = buf.getInt(freeSpaceOff);
int prefixTupleCount = buf.getInt(prefixTupleCountOff);
- if(prefixTupleCount > 0) {
-
- // debug
- int max = 0;
- for(int i = 0; i < prefixTupleCount; i++) {
- framePrefixTuple.resetByTupleIndex(this, i);
- int end = framePrefixTuple.getFieldStart(framePrefixTuple.getFieldCount()-1) + framePrefixTuple.getFieldLength(framePrefixTuple.getFieldCount()-1);
- if(end > max) max = end;
- }
-
- framePrefixTuple.resetByTupleIndex(this, prefixTupleCount - 1);
- freeSpace = framePrefixTuple.getFieldStart(framePrefixTuple.getFieldCount()-1) + framePrefixTuple.getFieldLength(framePrefixTuple.getFieldCount()-1);
+ if (prefixTupleCount > 0) {
+
+ // debug
+ int max = 0;
+ for (int i = 0; i < prefixTupleCount; i++) {
+ framePrefixTuple.resetByTupleIndex(this, i);
+ int end = framePrefixTuple.getFieldStart(framePrefixTuple.getFieldCount() - 1)
+ + framePrefixTuple.getFieldLength(framePrefixTuple.getFieldCount() - 1);
+ if (end > max)
+ max = end;
+ }
+
+ framePrefixTuple.resetByTupleIndex(this, prefixTupleCount - 1);
+ freeSpace = framePrefixTuple.getFieldStart(framePrefixTuple.getFieldCount() - 1)
+ + framePrefixTuple.getFieldLength(framePrefixTuple.getFieldCount() - 1);
}
ArrayList<SlotOffTupleOff> sortedTupleOffs = new ArrayList<SlotOffTupleOff>();
sortedTupleOffs.ensureCapacity(tupleCount);
- for(int i = 0; i < tupleCount; i++) {
+ for (int i = 0; i < tupleCount; i++) {
int tupleSlotOff = slotManager.getTupleSlotOff(i);
int tupleSlot = buf.getInt(tupleSlotOff);
int tupleOff = slotManager.decodeSecondSlotField(tupleSlot);
- sortedTupleOffs.add(new SlotOffTupleOff(i, tupleSlotOff, tupleOff));
-
+ sortedTupleOffs.add(new SlotOffTupleOff(i, tupleSlotOff, tupleOff));
+
}
Collections.sort(sortedTupleOffs);
-
- for(int i = 0; i < sortedTupleOffs.size(); i++) {
- int tupleOff = sortedTupleOffs.get(i).tupleOff;
- int tupleSlot = buf.getInt(sortedTupleOffs.get(i).slotOff);
- int prefixSlotNum = slotManager.decodeFirstSlotField(tupleSlot);
-
+
+ for (int i = 0; i < sortedTupleOffs.size(); i++) {
+ int tupleOff = sortedTupleOffs.get(i).tupleOff;
+ int tupleSlot = buf.getInt(sortedTupleOffs.get(i).slotOff);
+ int prefixSlotNum = slotManager.decodeFirstSlotField(tupleSlot);
+
frameTuple.resetByTupleIndex(this, sortedTupleOffs.get(i).tupleIndex);
- int tupleEndOff = frameTuple.getFieldStart(frameTuple.getFieldCount()-1) + frameTuple.getFieldLength(frameTuple.getFieldCount()-1);
+ int tupleEndOff = frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
+ + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1);
int tupleLength = tupleEndOff - tupleOff;
System.arraycopy(buf.array(), tupleOff, buf.array(), freeSpace, tupleLength);
-
+
slotManager.setSlot(sortedTupleOffs.get(i).slotOff, slotManager.encodeSlotFields(prefixSlotNum, freeSpace));
freeSpace += tupleLength;
}
-
+
buf.putInt(freeSpaceOff, freeSpace);
- int totalFreeSpace = buf.capacity() - buf.getInt(freeSpaceOff) - ((buf.getInt(tupleCountOff) + buf.getInt(prefixTupleCountOff)) * slotManager.getSlotSize());
+ int totalFreeSpace = buf.capacity() - buf.getInt(freeSpaceOff)
+ - ((buf.getInt(tupleCountOff) + buf.getInt(prefixTupleCountOff)) * slotManager.getSlotSize());
buf.putInt(totalFreeSpaceOff, totalFreeSpace);
}
-
+
@Override
- public void delete(ITupleReference tuple, MultiComparator cmp, boolean exactDelete) throws Exception {
- int slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.FTM_EXACT, FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
+ public void delete(ITupleReference tuple, MultiComparator cmp, boolean exactDelete) throws Exception {
+ int slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.FTM_EXACT,
+ FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
int tupleIndex = slotManager.decodeSecondSlotField(slot);
- if(tupleIndex == FieldPrefixSlotManager.GREATEST_SLOT) {
- throw new BTreeException("Key to be deleted does not exist.");
- }
- else {
- int prefixSlotNum = slotManager.decodeFirstSlotField(slot);
+ if (tupleIndex == FieldPrefixSlotManager.GREATEST_SLOT) {
+ throw new BTreeException("Key to be deleted does not exist.");
+ } else {
+ int prefixSlotNum = slotManager.decodeFirstSlotField(slot);
int tupleSlotOff = slotManager.getTupleSlotOff(tupleIndex);
-
- if(exactDelete) {
+
+ if (exactDelete) {
frameTuple.setFieldCount(cmp.getFieldCount());
frameTuple.resetByTupleIndex(this, tupleIndex);
-
- int comparison = cmp.fieldRangeCompare(tuple, frameTuple, cmp.getKeyFieldCount()-1, cmp.getFieldCount() - cmp.getKeyFieldCount());
- if(comparison != 0) {
- throw new BTreeException("Cannot delete tuple. Byte-by-byte comparison failed to prove equality.");
- }
+
+ int comparison = cmp.fieldRangeCompare(tuple, frameTuple, cmp.getKeyFieldCount() - 1, cmp
+ .getFieldCount()
+ - cmp.getKeyFieldCount());
+ if (comparison != 0) {
+ throw new BTreeException("Cannot delete tuple. Byte-by-byte comparison failed to prove equality.");
+ }
}
-
+
// perform deletion (we just do a memcpy to overwrite the slot)
int slotEndOff = slotManager.getTupleSlotEndOff();
int length = tupleSlotOff - slotEndOff;
System.arraycopy(buf.array(), slotEndOff, buf.array(), slotEndOff + slotManager.getSlotSize(), length);
-
- // maintain space information, get size of tuple suffix (suffix could be entire tuple)
- int tupleSize = 0;
+
+ // maintain space information, get size of tuple suffix (suffix
+ // could be entire tuple)
+ int tupleSize = 0;
int suffixFieldStart = 0;
- if(prefixSlotNum == FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
+ if (prefixSlotNum == FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
suffixFieldStart = 0;
buf.putInt(uncompressedTupleCountOff, buf.getInt(uncompressedTupleCountOff) - 1);
- }
- else {
+ } else {
int prefixSlot = buf.getInt(slotManager.getPrefixSlotOff(prefixSlotNum));
- suffixFieldStart = slotManager.decodeFirstSlotField(prefixSlot);
+ suffixFieldStart = slotManager.decodeFirstSlotField(prefixSlot);
}
-
+
frameTuple.resetByTupleIndex(this, tupleIndex);
- tupleSize = tupleWriter.bytesRequired(frameTuple, suffixFieldStart, frameTuple.getFieldCount() - suffixFieldStart);
-
+ tupleSize = tupleWriter.bytesRequired(frameTuple, suffixFieldStart, frameTuple.getFieldCount()
+ - suffixFieldStart);
+
buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + tupleSize + slotManager.getSlotSize());
- }
- }
-
- @Override
- public SpaceStatus hasSpaceInsert(ITupleReference tuple, MultiComparator cmp) {
- int freeContiguous = buf.capacity() - buf.getInt(freeSpaceOff) - ((buf.getInt(tupleCountOff) + buf.getInt(prefixTupleCountOff)) * slotManager.getSlotSize());
-
- int bytesRequired = tupleWriter.bytesRequired(tuple);
-
- // see if the tuple would fit uncompressed
- if(bytesRequired + slotManager.getSlotSize() <= freeContiguous) return SpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
-
- // see if tuple would fit into remaining space after compaction
- if(bytesRequired + slotManager.getSlotSize() <= buf.getInt(totalFreeSpaceOff)) return SpaceStatus.SUFFICIENT_SPACE;
-
- // see if the tuple matches a prefix and will fit after truncating the prefix
- int prefixSlotNum = slotManager.findPrefix(tuple, framePrefixTuple, cmp);
- if(prefixSlotNum != FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
- int prefixSlotOff = slotManager.getPrefixSlotOff(prefixSlotNum);
- int prefixSlot = buf.getInt(prefixSlotOff);
- int numPrefixFields = slotManager.decodeFirstSlotField(prefixSlot);
-
- int compressedSize = tupleWriter.bytesRequired(tuple, numPrefixFields, tuple.getFieldCount() - numPrefixFields);
- if(compressedSize + slotManager.getSlotSize() <= freeContiguous) return SpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
+ buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + tupleSize + slotManager.getSlotSize());
}
-
+ }
+
+ @Override
+ public SpaceStatus hasSpaceInsert(ITupleReference tuple, MultiComparator cmp) {
+ int freeContiguous = buf.capacity() - buf.getInt(freeSpaceOff)
+ - ((buf.getInt(tupleCountOff) + buf.getInt(prefixTupleCountOff)) * slotManager.getSlotSize());
+
+ int bytesRequired = tupleWriter.bytesRequired(tuple);
+
+ // see if the tuple would fit uncompressed
+ if (bytesRequired + slotManager.getSlotSize() <= freeContiguous)
+ return SpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
+
+ // see if tuple would fit into remaining space after compaction
+ if (bytesRequired + slotManager.getSlotSize() <= buf.getInt(totalFreeSpaceOff))
+ return SpaceStatus.SUFFICIENT_SPACE;
+
+ // see if the tuple matches a prefix and will fit after truncating the
+ // prefix
+ int prefixSlotNum = slotManager.findPrefix(tuple, framePrefixTuple, cmp);
+ if (prefixSlotNum != FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
+ int prefixSlotOff = slotManager.getPrefixSlotOff(prefixSlotNum);
+ int prefixSlot = buf.getInt(prefixSlotOff);
+ int numPrefixFields = slotManager.decodeFirstSlotField(prefixSlot);
+
+ int compressedSize = tupleWriter.bytesRequired(tuple, numPrefixFields, tuple.getFieldCount()
+ - numPrefixFields);
+ if (compressedSize + slotManager.getSlotSize() <= freeContiguous)
+ return SpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
+ }
+
return SpaceStatus.INSUFFICIENT_SPACE;
}
-
+
@Override
public SpaceStatus hasSpaceUpdate(int rid, ITupleReference tuple, MultiComparator cmp) {
// TODO Auto-generated method stub
return SpaceStatus.INSUFFICIENT_SPACE;
}
- protected void resetSpaceParams() {
- buf.putInt(freeSpaceOff, getOrigFreeSpaceOff());
+ protected void resetSpaceParams() {
+ buf.putInt(freeSpaceOff, getOrigFreeSpaceOff());
buf.putInt(totalFreeSpaceOff, getOrigTotalFreeSpace());
}
-
+
@Override
- public void initBuffer(byte level) {
- buf.putInt(pageLsnOff, 0); // TODO: might to set to a different lsn during creation
- buf.putInt(tupleCountOff, 0);
+ public void initBuffer(byte level) {
+ buf.putInt(pageLsnOff, 0); // TODO: might to set to a different lsn
+ // during creation
+ buf.putInt(tupleCountOff, 0);
resetSpaceParams();
buf.putInt(uncompressedTupleCountOff, 0);
buf.putInt(prefixTupleCountOff, 0);
buf.put(levelOff, level);
- buf.put(smFlagOff, (byte)0);
+ buf.put(smFlagOff, (byte) 0);
buf.putInt(prevLeafOff, -1);
- buf.putInt(nextLeafOff, -1);
+ buf.putInt(nextLeafOff, -1);
}
-
+
public void setTotalFreeSpace(int totalFreeSpace) {
buf.putInt(totalFreeSpaceOff, totalFreeSpace);
}
-
+
public int getOrigTotalFreeSpace() {
return buf.capacity() - (nextLeafOff + 4);
}
-
+
@Override
- public void insert(ITupleReference tuple, MultiComparator cmp) throws Exception {
- int slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.FTM_INCLUSIVE, FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
-
- slot = slotManager.insertSlot(slot, buf.getInt(freeSpaceOff));
-
- int prefixSlotNum = slotManager.decodeFirstSlotField(slot);
+ public void insert(ITupleReference tuple, MultiComparator cmp) throws Exception {
+ int slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.FTM_INCLUSIVE,
+ FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
+
+ slot = slotManager.insertSlot(slot, buf.getInt(freeSpaceOff));
+
+ int prefixSlotNum = slotManager.decodeFirstSlotField(slot);
int numPrefixFields = 0;
- if(prefixSlotNum != FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
- int prefixSlotOff = slotManager.getPrefixSlotOff(prefixSlotNum);
+ if (prefixSlotNum != FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
+ int prefixSlotOff = slotManager.getPrefixSlotOff(prefixSlotNum);
int prefixSlot = buf.getInt(prefixSlotOff);
- numPrefixFields = slotManager.decodeFirstSlotField(prefixSlot);
+ numPrefixFields = slotManager.decodeFirstSlotField(prefixSlot);
+ } else {
+ buf.putInt(uncompressedTupleCountOff, buf.getInt(uncompressedTupleCountOff) + 1);
}
- else {
- buf.putInt(uncompressedTupleCountOff, buf.getInt(uncompressedTupleCountOff) + 1);
- }
-
+
int freeSpace = buf.getInt(freeSpaceOff);
- int bytesWritten = tupleWriter.writeTupleFields(tuple, numPrefixFields, tuple.getFieldCount() - numPrefixFields, buf, freeSpace);
-
+ int bytesWritten = tupleWriter.writeTupleFields(tuple, numPrefixFields,
+ tuple.getFieldCount() - numPrefixFields, buf, freeSpace);
+
buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
+ buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
}
-
+
@Override
public void update(int rid, ITupleReference tuple) throws Exception {
// TODO Auto-generated method stub
-
- }
-
+
+ }
+
@Override
public void printHeader() {
// TODO Auto-generated method stub
-
- }
-
+
+ }
+
@Override
public int getTupleCount() {
return buf.getInt(tupleCountOff);
}
-
+
public ISlotManager getSlotManager() {
return null;
}
-
+
@Override
- public String printKeys(MultiComparator cmp, ISerializerDeserializer[] fields) throws HyracksDataException {
- StringBuilder strBuilder = new StringBuilder();
- int tupleCount = buf.getInt(tupleCountOff);
- frameTuple.setFieldCount(fields.length);
- for(int i = 0; i < tupleCount; i++) {
- frameTuple.resetByTupleIndex(this, i);
- for(int j = 0; j < cmp.getKeyFieldCount(); j++) {
- ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(j), frameTuple.getFieldStart(j), frameTuple.getFieldLength(j));
- DataInput dataIn = new DataInputStream(inStream);
- Object o = fields[j].deserialize(dataIn);
- strBuilder.append(o.toString() + " ");
- }
- strBuilder.append(" | ");
- }
- strBuilder.append("\n");
- return strBuilder.toString();
+ public String printKeys(MultiComparator cmp, ISerializerDeserializer[] fields) throws HyracksDataException {
+ StringBuilder strBuilder = new StringBuilder();
+ int tupleCount = buf.getInt(tupleCountOff);
+ frameTuple.setFieldCount(fields.length);
+ for (int i = 0; i < tupleCount; i++) {
+ frameTuple.resetByTupleIndex(this, i);
+ for (int j = 0; j < cmp.getKeyFieldCount(); j++) {
+ ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(j), frameTuple
+ .getFieldStart(j), frameTuple.getFieldLength(j));
+ DataInput dataIn = new DataInputStream(inStream);
+ Object o = fields[j].deserialize(dataIn);
+ strBuilder.append(o.toString() + " ");
+ }
+ strBuilder.append(" | ");
+ }
+ strBuilder.append("\n");
+ return strBuilder.toString();
}
-
+
@Override
- public int getTupleOffset(int slotNum) {
- int tupleSlotOff = slotManager.getTupleSlotOff(slotNum);
- int tupleSlot = buf.getInt(tupleSlotOff);
- return slotManager.decodeSecondSlotField(tupleSlot);
+ public int getTupleOffset(int slotNum) {
+ int tupleSlotOff = slotManager.getTupleSlotOff(slotNum);
+ int tupleSlot = buf.getInt(tupleSlotOff);
+ return slotManager.decodeSecondSlotField(tupleSlot);
}
-
+
@Override
public int getPageLsn() {
- return buf.getInt(pageLsnOff);
+ return buf.getInt(pageLsnOff);
}
@Override
public void setPageLsn(int pageLsn) {
- buf.putInt(pageLsnOff, pageLsn);
+ buf.putInt(pageLsnOff, pageLsn);
}
@Override
public int getTotalFreeSpace() {
return buf.getInt(totalFreeSpaceOff);
}
-
- @Override
- public boolean isLeaf() {
- return buf.get(levelOff) == 0;
- }
-
- @Override
- public byte getLevel() {
- return buf.get(levelOff);
- }
-
- @Override
- public void setLevel(byte level) {
- buf.put(levelOff, level);
- }
-
- @Override
- public boolean getSmFlag() {
- return buf.get(smFlagOff) != 0;
- }
- @Override
- public void setSmFlag(boolean smFlag) {
- if(smFlag)
- buf.put(smFlagOff, (byte)1);
- else
- buf.put(smFlagOff, (byte)0);
- }
-
- public int getPrefixTupleCount() {
- return buf.getInt(prefixTupleCountOff);
- }
-
- public void setPrefixTupleCount(int prefixTupleCount) {
- buf.putInt(prefixTupleCountOff, prefixTupleCount);
- }
-
@Override
- public void insertSorted(ITupleReference tuple, MultiComparator cmp) throws Exception {
- int freeSpace = buf.getInt(freeSpaceOff);
- int fieldsToTruncate = 0;
-
- // check if tuple matches last prefix tuple
- if(buf.getInt(prefixTupleCountOff) > 0) {
- framePrefixTuple.resetByTupleIndex(this, buf.getInt(prefixTupleCountOff)-1);
- if(cmp.fieldRangeCompare(tuple, framePrefixTuple, 0, framePrefixTuple.getFieldCount()) == 0) {
- fieldsToTruncate = framePrefixTuple.getFieldCount();
- }
- }
-
- int bytesWritten = tupleWriter.writeTupleFields(tuple, fieldsToTruncate, tuple.getFieldCount() - fieldsToTruncate, buf, freeSpace);
-
- // insert slot
- int prefixSlotNum = FieldPrefixSlotManager.TUPLE_UNCOMPRESSED;
- if(fieldsToTruncate > 0) prefixSlotNum = buf.getInt(prefixTupleCountOff)-1;
- else buf.putInt(uncompressedTupleCountOff, buf.getInt(uncompressedTupleCountOff) + 1);
- int insSlot = slotManager.encodeSlotFields(prefixSlotNum, FieldPrefixSlotManager.GREATEST_SLOT);
- slotManager.insertSlot(insSlot, freeSpace);
-
- // update page metadata
- buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
- buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
+ public boolean isLeaf() {
+ return buf.get(levelOff) == 0;
}
-
+
@Override
- public int split(IBTreeFrame rightFrame, ITupleReference tuple, MultiComparator cmp, SplitKey splitKey) throws Exception {
-
- FieldPrefixNSMLeafFrame rf = (FieldPrefixNSMLeafFrame)rightFrame;
-
- frameTuple.setFieldCount(cmp.getFieldCount());
-
- // before doing anything check if key already exists
- int slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.FTM_EXACT, FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
- int tupleSlotNum = slotManager.decodeSecondSlotField(slot);
- if(tupleSlotNum != FieldPrefixSlotManager.GREATEST_SLOT) {
- frameTuple.resetByTupleIndex(this, tupleSlotNum);
- if(cmp.compare(tuple, frameTuple) == 0) {
- throw new BTreeException("Inserting duplicate key into unique index");
- }
- }
-
- ByteBuffer right = rf.getBuffer();
- int tupleCount = getTupleCount();
- int prefixTupleCount = getPrefixTupleCount();
-
- int tuplesToLeft;
- int midSlotNum = tupleCount / 2;
- IBTreeFrame targetFrame = null;
- frameTuple.resetByTupleIndex(this, midSlotNum);
- int comparison = cmp.compare(tuple, frameTuple);
- if (comparison >= 0) {
- tuplesToLeft = midSlotNum + (tupleCount % 2);
- targetFrame = rf;
- } else {
- tuplesToLeft = midSlotNum;
- targetFrame = this;
- }
- int tuplesToRight = tupleCount - tuplesToLeft;
-
- // copy entire page
- System.arraycopy(buf.array(), 0, right.array(), 0, buf.capacity());
-
- // determine how many slots go on left and right page
- int prefixesToLeft = prefixTupleCount;
- for(int i = tuplesToLeft; i < tupleCount; i++) {
- int tupleSlotOff = rf.slotManager.getTupleSlotOff(i);
- int tupleSlot = right.getInt(tupleSlotOff);
- int prefixSlotNum = rf.slotManager.decodeFirstSlotField(tupleSlot);
- if(prefixSlotNum != FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
- prefixesToLeft = prefixSlotNum;
- break;
- }
- }
-
- // if we are splitting in the middle of a prefix both pages need to have the prefix slot and tuple
- int boundaryTupleSlotOff = rf.slotManager.getTupleSlotOff(tuplesToLeft-1);
- int boundaryTupleSlot = buf.getInt(boundaryTupleSlotOff);
- int boundaryPrefixSlotNum = rf.slotManager.decodeFirstSlotField(boundaryTupleSlot);
- int prefixesToRight = prefixTupleCount - prefixesToLeft;
- if(boundaryPrefixSlotNum == prefixesToLeft && boundaryPrefixSlotNum != FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
- prefixesToLeft++; // tuples on both pages share one prefix
- }
-
- // move prefix tuples on right page to beginning of page and adjust prefix slots
- if(prefixesToRight > 0 && prefixesToLeft > 0 && prefixTupleCount > 1) {
-
- int freeSpace = rf.getOrigFreeSpaceOff();
- int lastPrefixSlotNum = -1;
-
- for(int i = tuplesToLeft; i < tupleCount; i++) {
- int tupleSlotOff = rf.slotManager.getTupleSlotOff(i);
- int tupleSlot = right.getInt(tupleSlotOff);
- int prefixSlotNum = rf.slotManager.decodeFirstSlotField(tupleSlot);
- if(prefixSlotNum != FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
- framePrefixTuple.resetByTupleIndex(this, prefixSlotNum);
-
- int bytesWritten = 0;
- if(lastPrefixSlotNum != prefixSlotNum) {
- bytesWritten = tupleWriter.writeTuple(framePrefixTuple, right, freeSpace);
- int newPrefixSlot = rf.slotManager.encodeSlotFields(framePrefixTuple.getFieldCount(), freeSpace);
- int prefixSlotOff = rf.slotManager.getPrefixSlotOff(prefixSlotNum);
- right.putInt(prefixSlotOff, newPrefixSlot);
- lastPrefixSlotNum = prefixSlotNum;
- }
-
- int tupleOff = rf.slotManager.decodeSecondSlotField(tupleSlot);
- int newTupleSlot = rf.slotManager.encodeSlotFields(prefixSlotNum - (prefixTupleCount - prefixesToRight), tupleOff);
- right.putInt(tupleSlotOff, newTupleSlot);
- freeSpace += bytesWritten;
- }
- }
- }
-
- // move the modified prefix slots on the right page
- int prefixSrc = rf.slotManager.getPrefixSlotEndOff();
- int prefixDest = rf.slotManager.getPrefixSlotEndOff() + (prefixTupleCount - prefixesToRight) * rf.slotManager.getSlotSize();
- int prefixLength = rf.slotManager.getSlotSize() * prefixesToRight;
- System.arraycopy(right.array(), prefixSrc, right.array(), prefixDest, prefixLength);
-
- // on right page we need to copy rightmost tuple slots to left
- int src = rf.slotManager.getTupleSlotEndOff();
- int dest = rf.slotManager.getTupleSlotEndOff() + tuplesToLeft * rf.slotManager.getSlotSize() + (prefixTupleCount - prefixesToRight) * rf.slotManager.getSlotSize();
- int length = rf.slotManager.getSlotSize() * tuplesToRight;
- System.arraycopy(right.array(), src, right.array(), dest, length);
-
- right.putInt(tupleCountOff, tuplesToRight);
- right.putInt(prefixTupleCountOff, prefixesToRight);
-
- // on left page move slots to reflect possibly removed prefixes
- src = slotManager.getTupleSlotEndOff() + tuplesToRight * slotManager.getSlotSize();
- dest = slotManager.getTupleSlotEndOff() + tuplesToRight * slotManager.getSlotSize() + (prefixTupleCount - prefixesToLeft) * slotManager.getSlotSize();
- length = slotManager.getSlotSize() * tuplesToLeft;
- System.arraycopy(buf.array(), src, buf.array(), dest, length);
-
- buf.putInt(tupleCountOff, tuplesToLeft);
- buf.putInt(prefixTupleCountOff, prefixesToLeft);
-
- // compact both pages
- compact(cmp);
- rightFrame.compact(cmp);
-
- // insert last key
- targetFrame.insert(tuple, cmp);
-
- // set split key to be highest value in left page
- frameTuple.resetByTupleIndex(this, getTupleCount()-1);
-
- int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
- splitKey.initData(splitKeySize);
- tupleWriter.writeTupleFields(frameTuple, 0, cmp.getKeyFieldCount(), splitKey.getBuffer(), 0);
- splitKey.getTuple().resetByOffset(splitKey.getBuffer(), 0);
-
- return 0;
+ public byte getLevel() {
+ return buf.get(levelOff);
}
-
- @Override
- public int getFreeSpaceOff() {
- return buf.getInt(freeSpaceOff);
- }
-
- public int getOrigFreeSpaceOff() {
- return nextLeafOff + 4;
- }
-
- @Override
- public void setFreeSpaceOff(int freeSpace) {
- buf.putInt(freeSpaceOff, freeSpace);
- }
-
- @Override
- public void setNextLeaf(int page) {
- buf.putInt(nextLeafOff, page);
- }
- @Override
- public void setPrevLeaf(int page) {
- buf.putInt(prevLeafOff, page);
- }
-
- @Override
- public int getNextLeaf() {
- return buf.getInt(nextLeafOff);
- }
-
- @Override
- public int getPrevLeaf() {
- return buf.getInt(prevLeafOff);
- }
-
- public int getUncompressedTupleCount() {
- return buf.getInt(uncompressedTupleCountOff);
- }
-
- public void setUncompressedTupleCount(int uncompressedTupleCount) {
- buf.putInt(uncompressedTupleCountOff, uncompressedTupleCount);
- }
-
- @Override
- public int getSlotSize() {
- return slotManager.getSlotSize();
- }
-
- @Override
- public void setPageTupleFieldCount(int fieldCount) {
- frameTuple.setFieldCount(fieldCount);
- }
-
- public IBTreeTupleWriter getTupleWriter() {
- return tupleWriter;
+ @Override
+ public void setLevel(byte level) {
+ buf.put(levelOff, level);
}
-
- @Override
- public IBTreeTupleReference createTupleReference() {
- return new FieldPrefixTupleReference(tupleWriter.createTupleReference());
- }
-
- @Override
- public int findTupleIndex(ITupleReference searchKey, IBTreeTupleReference pageTuple, MultiComparator cmp, FindTupleMode ftm, FindTupleNoExactMatchPolicy ftp) {
- int slot = slotManager.findSlot(searchKey, pageTuple, framePrefixTuple, cmp, ftm, ftp);
- int tupleIndex = slotManager.decodeSecondSlotField(slot);
- if(tupleIndex == FieldPrefixSlotManager.GREATEST_SLOT) return -1;
- else return tupleIndex;
- }
-
+
+ @Override
+ public boolean getSmFlag() {
+ return buf.get(smFlagOff) != 0;
+ }
+
+ @Override
+ public void setSmFlag(boolean smFlag) {
+ if (smFlag)
+ buf.put(smFlagOff, (byte) 1);
+ else
+ buf.put(smFlagOff, (byte) 0);
+ }
+
+ public int getPrefixTupleCount() {
+ return buf.getInt(prefixTupleCountOff);
+ }
+
+ public void setPrefixTupleCount(int prefixTupleCount) {
+ buf.putInt(prefixTupleCountOff, prefixTupleCount);
+ }
+
+ @Override
+ public void insertSorted(ITupleReference tuple, MultiComparator cmp) throws HyracksDataException {
+ int freeSpace = buf.getInt(freeSpaceOff);
+ int fieldsToTruncate = 0;
+
+ // check if tuple matches last prefix tuple
+ if (buf.getInt(prefixTupleCountOff) > 0) {
+ framePrefixTuple.resetByTupleIndex(this, buf.getInt(prefixTupleCountOff) - 1);
+ if (cmp.fieldRangeCompare(tuple, framePrefixTuple, 0, framePrefixTuple.getFieldCount()) == 0) {
+ fieldsToTruncate = framePrefixTuple.getFieldCount();
+ }
+ }
+
+ int bytesWritten = tupleWriter.writeTupleFields(tuple, fieldsToTruncate, tuple.getFieldCount()
+ - fieldsToTruncate, buf, freeSpace);
+
+ // insert slot
+ int prefixSlotNum = FieldPrefixSlotManager.TUPLE_UNCOMPRESSED;
+ if (fieldsToTruncate > 0)
+ prefixSlotNum = buf.getInt(prefixTupleCountOff) - 1;
+ else
+ buf.putInt(uncompressedTupleCountOff, buf.getInt(uncompressedTupleCountOff) + 1);
+ int insSlot = slotManager.encodeSlotFields(prefixSlotNum, FieldPrefixSlotManager.GREATEST_SLOT);
+ slotManager.insertSlot(insSlot, freeSpace);
+
+ // update page metadata
+ buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
+ buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
+ buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
+ }
+
+ @Override
+ public int split(IBTreeFrame rightFrame, ITupleReference tuple, MultiComparator cmp, SplitKey splitKey)
+ throws Exception {
+
+ FieldPrefixNSMLeafFrame rf = (FieldPrefixNSMLeafFrame) rightFrame;
+
+ frameTuple.setFieldCount(cmp.getFieldCount());
+
+ // before doing anything check if key already exists
+ int slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.FTM_EXACT,
+ FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
+ int tupleSlotNum = slotManager.decodeSecondSlotField(slot);
+ if (tupleSlotNum != FieldPrefixSlotManager.GREATEST_SLOT) {
+ frameTuple.resetByTupleIndex(this, tupleSlotNum);
+ if (cmp.compare(tuple, frameTuple) == 0) {
+ throw new BTreeException("Inserting duplicate key into unique index");
+ }
+ }
+
+ ByteBuffer right = rf.getBuffer();
+ int tupleCount = getTupleCount();
+ int prefixTupleCount = getPrefixTupleCount();
+
+ int tuplesToLeft;
+ int midSlotNum = tupleCount / 2;
+ IBTreeFrame targetFrame = null;
+ frameTuple.resetByTupleIndex(this, midSlotNum);
+ int comparison = cmp.compare(tuple, frameTuple);
+ if (comparison >= 0) {
+ tuplesToLeft = midSlotNum + (tupleCount % 2);
+ targetFrame = rf;
+ } else {
+ tuplesToLeft = midSlotNum;
+ targetFrame = this;
+ }
+ int tuplesToRight = tupleCount - tuplesToLeft;
+
+ // copy entire page
+ System.arraycopy(buf.array(), 0, right.array(), 0, buf.capacity());
+
+ // determine how many slots go on left and right page
+ int prefixesToLeft = prefixTupleCount;
+ for (int i = tuplesToLeft; i < tupleCount; i++) {
+ int tupleSlotOff = rf.slotManager.getTupleSlotOff(i);
+ int tupleSlot = right.getInt(tupleSlotOff);
+ int prefixSlotNum = rf.slotManager.decodeFirstSlotField(tupleSlot);
+ if (prefixSlotNum != FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
+ prefixesToLeft = prefixSlotNum;
+ break;
+ }
+ }
+
+ // if we are splitting in the middle of a prefix both pages need to have
+ // the prefix slot and tuple
+ int boundaryTupleSlotOff = rf.slotManager.getTupleSlotOff(tuplesToLeft - 1);
+ int boundaryTupleSlot = buf.getInt(boundaryTupleSlotOff);
+ int boundaryPrefixSlotNum = rf.slotManager.decodeFirstSlotField(boundaryTupleSlot);
+ int prefixesToRight = prefixTupleCount - prefixesToLeft;
+ if (boundaryPrefixSlotNum == prefixesToLeft
+ && boundaryPrefixSlotNum != FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
+ prefixesToLeft++; // tuples on both pages share one prefix
+ }
+
+ // move prefix tuples on right page to beginning of page and adjust
+ // prefix slots
+ if (prefixesToRight > 0 && prefixesToLeft > 0 && prefixTupleCount > 1) {
+
+ int freeSpace = rf.getOrigFreeSpaceOff();
+ int lastPrefixSlotNum = -1;
+
+ for (int i = tuplesToLeft; i < tupleCount; i++) {
+ int tupleSlotOff = rf.slotManager.getTupleSlotOff(i);
+ int tupleSlot = right.getInt(tupleSlotOff);
+ int prefixSlotNum = rf.slotManager.decodeFirstSlotField(tupleSlot);
+ if (prefixSlotNum != FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
+ framePrefixTuple.resetByTupleIndex(this, prefixSlotNum);
+
+ int bytesWritten = 0;
+ if (lastPrefixSlotNum != prefixSlotNum) {
+ bytesWritten = tupleWriter.writeTuple(framePrefixTuple, right, freeSpace);
+ int newPrefixSlot = rf.slotManager
+ .encodeSlotFields(framePrefixTuple.getFieldCount(), freeSpace);
+ int prefixSlotOff = rf.slotManager.getPrefixSlotOff(prefixSlotNum);
+ right.putInt(prefixSlotOff, newPrefixSlot);
+ lastPrefixSlotNum = prefixSlotNum;
+ }
+
+ int tupleOff = rf.slotManager.decodeSecondSlotField(tupleSlot);
+ int newTupleSlot = rf.slotManager.encodeSlotFields(prefixSlotNum
+ - (prefixTupleCount - prefixesToRight), tupleOff);
+ right.putInt(tupleSlotOff, newTupleSlot);
+ freeSpace += bytesWritten;
+ }
+ }
+ }
+
+ // move the modified prefix slots on the right page
+ int prefixSrc = rf.slotManager.getPrefixSlotEndOff();
+ int prefixDest = rf.slotManager.getPrefixSlotEndOff() + (prefixTupleCount - prefixesToRight)
+ * rf.slotManager.getSlotSize();
+ int prefixLength = rf.slotManager.getSlotSize() * prefixesToRight;
+ System.arraycopy(right.array(), prefixSrc, right.array(), prefixDest, prefixLength);
+
+ // on right page we need to copy rightmost tuple slots to left
+ int src = rf.slotManager.getTupleSlotEndOff();
+ int dest = rf.slotManager.getTupleSlotEndOff() + tuplesToLeft * rf.slotManager.getSlotSize()
+ + (prefixTupleCount - prefixesToRight) * rf.slotManager.getSlotSize();
+ int length = rf.slotManager.getSlotSize() * tuplesToRight;
+ System.arraycopy(right.array(), src, right.array(), dest, length);
+
+ right.putInt(tupleCountOff, tuplesToRight);
+ right.putInt(prefixTupleCountOff, prefixesToRight);
+
+ // on left page move slots to reflect possibly removed prefixes
+ src = slotManager.getTupleSlotEndOff() + tuplesToRight * slotManager.getSlotSize();
+ dest = slotManager.getTupleSlotEndOff() + tuplesToRight * slotManager.getSlotSize()
+ + (prefixTupleCount - prefixesToLeft) * slotManager.getSlotSize();
+ length = slotManager.getSlotSize() * tuplesToLeft;
+ System.arraycopy(buf.array(), src, buf.array(), dest, length);
+
+ buf.putInt(tupleCountOff, tuplesToLeft);
+ buf.putInt(prefixTupleCountOff, prefixesToLeft);
+
+ // compact both pages
+ compact(cmp);
+ rightFrame.compact(cmp);
+
+ // insert last key
+ targetFrame.insert(tuple, cmp);
+
+ // set split key to be highest value in left page
+ frameTuple.resetByTupleIndex(this, getTupleCount() - 1);
+
+ int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
+ splitKey.initData(splitKeySize);
+ tupleWriter.writeTupleFields(frameTuple, 0, cmp.getKeyFieldCount(), splitKey.getBuffer(), 0);
+ splitKey.getTuple().resetByOffset(splitKey.getBuffer(), 0);
+
+ return 0;
+ }
+
+ @Override
+ public int getFreeSpaceOff() {
+ return buf.getInt(freeSpaceOff);
+ }
+
+ public int getOrigFreeSpaceOff() {
+ return nextLeafOff + 4;
+ }
+
+ @Override
+ public void setFreeSpaceOff(int freeSpace) {
+ buf.putInt(freeSpaceOff, freeSpace);
+ }
+
+ @Override
+ public void setNextLeaf(int page) {
+ buf.putInt(nextLeafOff, page);
+ }
+
+ @Override
+ public void setPrevLeaf(int page) {
+ buf.putInt(prevLeafOff, page);
+ }
+
+ @Override
+ public int getNextLeaf() {
+ return buf.getInt(nextLeafOff);
+ }
+
+ @Override
+ public int getPrevLeaf() {
+ return buf.getInt(prevLeafOff);
+ }
+
+ public int getUncompressedTupleCount() {
+ return buf.getInt(uncompressedTupleCountOff);
+ }
+
+ public void setUncompressedTupleCount(int uncompressedTupleCount) {
+ buf.putInt(uncompressedTupleCountOff, uncompressedTupleCount);
+ }
+
+ @Override
+ public int getSlotSize() {
+ return slotManager.getSlotSize();
+ }
+
+ @Override
+ public void setPageTupleFieldCount(int fieldCount) {
+ frameTuple.setFieldCount(fieldCount);
+ }
+
+ public IBTreeTupleWriter getTupleWriter() {
+ return tupleWriter;
+ }
+
+ @Override
+ public IBTreeTupleReference createTupleReference() {
+ return new FieldPrefixTupleReference(tupleWriter.createTupleReference());
+ }
+
+ @Override
+ public int findTupleIndex(ITupleReference searchKey, IBTreeTupleReference pageTuple, MultiComparator cmp,
+ FindTupleMode ftm, FindTupleNoExactMatchPolicy ftp) {
+ int slot = slotManager.findSlot(searchKey, pageTuple, framePrefixTuple, cmp, ftm, ftp);
+ int tupleIndex = slotManager.decodeSecondSlotField(slot);
+ if (tupleIndex == FieldPrefixSlotManager.GREATEST_SLOT)
+ return -1;
+ else
+ return tupleIndex;
+ }
+
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/FieldPrefixNSMLeafFrameFactory.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/FieldPrefixNSMLeafFrameFactory.java
index 21cfbcb..a3084ab 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/FieldPrefixNSMLeafFrameFactory.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/FieldPrefixNSMLeafFrameFactory.java
@@ -20,15 +20,16 @@
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleWriterFactory;
public class FieldPrefixNSMLeafFrameFactory implements IBTreeLeafFrameFactory {
-
- private static final long serialVersionUID = 1L;
- private IBTreeTupleWriterFactory tupleWriterFactory;
-
- public FieldPrefixNSMLeafFrameFactory(IBTreeTupleWriterFactory tupleWriterFactory) {
- this.tupleWriterFactory = tupleWriterFactory;
- }
- @Override
- public IBTreeLeafFrame getFrame() {
- return new FieldPrefixNSMLeafFrame(tupleWriterFactory.createTupleWriter());
- }
+
+ private static final long serialVersionUID = 1L;
+ private IBTreeTupleWriterFactory tupleWriterFactory;
+
+ public FieldPrefixNSMLeafFrameFactory(IBTreeTupleWriterFactory tupleWriterFactory) {
+ this.tupleWriterFactory = tupleWriterFactory;
+ }
+
+ @Override
+ public IBTreeLeafFrame getFrame() {
+ return new FieldPrefixNSMLeafFrame(tupleWriterFactory.createTupleWriter());
+ }
}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/MetaDataFrame.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/MetaDataFrame.java
index 9c8fa3f..a219dd6 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/MetaDataFrame.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/MetaDataFrame.java
@@ -26,46 +26,45 @@
// other meta pages (i.e., with level -2) have junk in the max page field
public class MetaDataFrame implements IBTreeMetaDataFrame {
-
- protected static final int tupleCountOff = 0;
+
+ protected static final int tupleCountOff = 0;
protected static final int freeSpaceOff = tupleCountOff + 4;
protected static final int maxPageOff = freeSpaceOff + 4;
protected static final byte levelOff = maxPageOff + 1;
protected static final byte nextPageOff = maxPageOff + 8;
-
+
protected ICachedPage page = null;
- protected ByteBuffer buf = null;
-
+ protected ByteBuffer buf = null;
+
public int getMaxPage() {
return buf.getInt(maxPageOff);
}
-
+
public void setMaxPage(int maxPage) {
buf.putInt(maxPageOff, maxPage);
}
-
- public int getFreePage() {
- int tupleCount = buf.getInt(tupleCountOff);
- if(tupleCount > 0) {
+
+ public int getFreePage() {
+ int tupleCount = buf.getInt(tupleCountOff);
+ if (tupleCount > 0) {
// return the last page from the linked list of free pages
// TODO: this is a dumb policy, but good enough for now
int lastPageOff = buf.getInt(freeSpaceOff) - 4;
buf.putInt(freeSpaceOff, lastPageOff);
buf.putInt(tupleCountOff, tupleCount - 1);
return buf.getInt(lastPageOff);
- }
- else {
+ } else {
return -1;
- }
+ }
}
-
-
+
// must be checked before adding free page
- // user of this class is responsible for getting a free page as a new meta page, latching it, etc. if there is no space on this page
+ // user of this class is responsible for getting a free page as a new meta
+ // page, latching it, etc. if there is no space on this page
public boolean hasSpace() {
return buf.getInt(freeSpaceOff) + 4 < buf.capacity();
}
-
+
// on bounds checking is done, there must be free space
public void addFreePage(int freePage) {
int freeSpace = buf.getInt(freeSpaceOff);
@@ -75,24 +74,24 @@
}
@Override
- public byte getLevel() {
+ public byte getLevel() {
return buf.get(levelOff);
}
-
+
@Override
public void setLevel(byte level) {
- buf.put(levelOff, level);
+ buf.put(levelOff, level);
}
-
+
@Override
public ICachedPage getPage() {
return page;
- }
+ }
@Override
public void setPage(ICachedPage page) {
this.page = page;
- this.buf = page.getBuffer();
+ this.buf = page.getBuffer();
}
@Override
@@ -102,7 +101,7 @@
buf.putInt(levelOff, level);
buf.putInt(nextPageOff, -1);
}
-
+
@Override
public int getNextPage() {
return buf.getInt(nextPageOff);
@@ -111,5 +110,5 @@
@Override
public void setNextPage(int nextPage) {
buf.putInt(nextPageOff, nextPage);
- }
+ }
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/MetaDataFrameFactory.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/MetaDataFrameFactory.java
index d11a920..9896bf9 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/MetaDataFrameFactory.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/MetaDataFrameFactory.java
@@ -20,7 +20,7 @@
public class MetaDataFrameFactory implements IBTreeMetaDataFrameFactory {
@Override
- public IBTreeMetaDataFrame getFrame() {
+ public IBTreeMetaDataFrame getFrame() {
return new MetaDataFrame();
- }
+ }
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMFrame.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMFrame.java
index 01d24ce0..2324eab 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMFrame.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMFrame.java
@@ -39,264 +39,277 @@
import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
public abstract class NSMFrame implements IBTreeFrame {
-
- protected static final int pageLsnOff = 0; // 0
- protected static final int tupleCountOff = pageLsnOff + 4; // 4
- protected static final int freeSpaceOff = tupleCountOff + 4; // 8
- protected static final int totalFreeSpaceOff = freeSpaceOff + 4; // 16
- protected static final byte levelOff = totalFreeSpaceOff + 4;
- protected static final byte smFlagOff = levelOff + 1;
-
- protected ICachedPage page = null;
- protected ByteBuffer buf = null;
- protected ISlotManager slotManager;
-
- protected IBTreeTupleWriter tupleWriter;
- protected IBTreeTupleReference frameTuple;
-
- public NSMFrame(IBTreeTupleWriter tupleWriter) {
- this.tupleWriter = tupleWriter;
- this.frameTuple = tupleWriter.createTupleReference();
- this.slotManager = new OrderedSlotManager();
- }
-
- @Override
- public void initBuffer(byte level) {
- buf.putInt(pageLsnOff, 0); // TODO: might to set to a different lsn during creation
- buf.putInt(tupleCountOff, 0);
- resetSpaceParams();
- buf.put(levelOff, level);
- buf.put(smFlagOff, (byte)0);
- }
-
- @Override
- public boolean isLeaf() {
- return buf.get(levelOff) == 0;
- }
-
- @Override
- public byte getLevel() {
- return buf.get(levelOff);
- }
-
- @Override
- public void setLevel(byte level) {
- buf.put(levelOff, level);
- }
-
- @Override
- public boolean getSmFlag() {
- return buf.get(smFlagOff) != 0;
- }
- @Override
- public void setSmFlag(boolean smFlag) {
- if(smFlag)buf.put(smFlagOff, (byte)1);
- else buf.put(smFlagOff, (byte)0);
- }
-
- @Override
- public int getFreeSpaceOff() {
- return buf.getInt(freeSpaceOff);
- }
+ protected static final int pageLsnOff = 0; // 0
+ protected static final int tupleCountOff = pageLsnOff + 4; // 4
+ protected static final int freeSpaceOff = tupleCountOff + 4; // 8
+ protected static final int totalFreeSpaceOff = freeSpaceOff + 4; // 16
+ protected static final byte levelOff = totalFreeSpaceOff + 4;
+ protected static final byte smFlagOff = levelOff + 1;
- @Override
- public void setFreeSpaceOff(int freeSpace) {
- buf.putInt(freeSpaceOff, freeSpace);
- }
-
- @Override
- public void setPage(ICachedPage page) {
- this.page = page;
- this.buf = page.getBuffer();
- slotManager.setFrame(this);
- }
-
- @Override
- public ByteBuffer getBuffer() {
- return page.getBuffer();
- }
-
- @Override
- public ICachedPage getPage() {
- return page;
- }
-
- @Override
- public void compact(MultiComparator cmp) {
- resetSpaceParams();
- frameTuple.setFieldCount(cmp.getFieldCount());
-
- int tupleCount = buf.getInt(tupleCountOff);
- int freeSpace = buf.getInt(freeSpaceOff);
-
- ArrayList<SlotOffTupleOff> sortedTupleOffs = new ArrayList<SlotOffTupleOff>();
- sortedTupleOffs.ensureCapacity(tupleCount);
- for(int i = 0; i < tupleCount; i++) {
- int slotOff = slotManager.getSlotOff(i);
- int tupleOff = slotManager.getTupleOff(slotOff);
- sortedTupleOffs.add(new SlotOffTupleOff(i, slotOff, tupleOff));
- }
- Collections.sort(sortedTupleOffs);
-
- for(int i = 0; i < sortedTupleOffs.size(); i++) {
- int tupleOff = sortedTupleOffs.get(i).tupleOff;
- frameTuple.resetByOffset(buf, tupleOff);
-
- int tupleEndOff = frameTuple.getFieldStart(frameTuple.getFieldCount()-1) + frameTuple.getFieldLength(frameTuple.getFieldCount()-1);
+ protected ICachedPage page = null;
+ protected ByteBuffer buf = null;
+ protected ISlotManager slotManager;
+
+ protected IBTreeTupleWriter tupleWriter;
+ protected IBTreeTupleReference frameTuple;
+
+ public NSMFrame(IBTreeTupleWriter tupleWriter) {
+ this.tupleWriter = tupleWriter;
+ this.frameTuple = tupleWriter.createTupleReference();
+ this.slotManager = new OrderedSlotManager();
+ }
+
+ @Override
+ public void initBuffer(byte level) {
+ buf.putInt(pageLsnOff, 0); // TODO: might to set to a different lsn
+ // during creation
+ buf.putInt(tupleCountOff, 0);
+ resetSpaceParams();
+ buf.put(levelOff, level);
+ buf.put(smFlagOff, (byte) 0);
+ }
+
+ @Override
+ public boolean isLeaf() {
+ return buf.get(levelOff) == 0;
+ }
+
+ @Override
+ public byte getLevel() {
+ return buf.get(levelOff);
+ }
+
+ @Override
+ public void setLevel(byte level) {
+ buf.put(levelOff, level);
+ }
+
+ @Override
+ public boolean getSmFlag() {
+ return buf.get(smFlagOff) != 0;
+ }
+
+ @Override
+ public void setSmFlag(boolean smFlag) {
+ if (smFlag)
+ buf.put(smFlagOff, (byte) 1);
+ else
+ buf.put(smFlagOff, (byte) 0);
+ }
+
+ @Override
+ public int getFreeSpaceOff() {
+ return buf.getInt(freeSpaceOff);
+ }
+
+ @Override
+ public void setFreeSpaceOff(int freeSpace) {
+ buf.putInt(freeSpaceOff, freeSpace);
+ }
+
+ @Override
+ public void setPage(ICachedPage page) {
+ this.page = page;
+ this.buf = page.getBuffer();
+ slotManager.setFrame(this);
+ }
+
+ @Override
+ public ByteBuffer getBuffer() {
+ return page.getBuffer();
+ }
+
+ @Override
+ public ICachedPage getPage() {
+ return page;
+ }
+
+ @Override
+ public void compact(MultiComparator cmp) {
+ resetSpaceParams();
+ frameTuple.setFieldCount(cmp.getFieldCount());
+
+ int tupleCount = buf.getInt(tupleCountOff);
+ int freeSpace = buf.getInt(freeSpaceOff);
+
+ ArrayList<SlotOffTupleOff> sortedTupleOffs = new ArrayList<SlotOffTupleOff>();
+ sortedTupleOffs.ensureCapacity(tupleCount);
+ for (int i = 0; i < tupleCount; i++) {
+ int slotOff = slotManager.getSlotOff(i);
+ int tupleOff = slotManager.getTupleOff(slotOff);
+ sortedTupleOffs.add(new SlotOffTupleOff(i, slotOff, tupleOff));
+ }
+ Collections.sort(sortedTupleOffs);
+
+ for (int i = 0; i < sortedTupleOffs.size(); i++) {
+ int tupleOff = sortedTupleOffs.get(i).tupleOff;
+ frameTuple.resetByOffset(buf, tupleOff);
+
+ int tupleEndOff = frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
+ + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1);
int tupleLength = tupleEndOff - tupleOff;
System.arraycopy(buf.array(), tupleOff, buf.array(), freeSpace, tupleLength);
-
- slotManager.setSlot(sortedTupleOffs.get(i).slotOff, freeSpace);
- freeSpace += tupleLength;
- }
-
- buf.putInt(freeSpaceOff, freeSpace);
- buf.putInt(totalFreeSpaceOff, buf.capacity() - freeSpace - tupleCount * slotManager.getSlotSize());
- }
- @Override
- public void delete(ITupleReference tuple, MultiComparator cmp, boolean exactDelete) throws Exception {
- frameTuple.setFieldCount(cmp.getFieldCount());
- int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_EXACT, FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
- int slotOff = slotManager.getSlotOff(tupleIndex);
- if(tupleIndex < 0) {
- throw new BTreeException("Key to be deleted does not exist.");
- }
- else {
- if(exactDelete) {
- // check the non-key columns for equality by byte-by-byte comparison
- int tupleOff = slotManager.getTupleOff(slotOff);
- frameTuple.resetByOffset(buf, tupleOff);
-
- int comparison = cmp.fieldRangeCompare(tuple, frameTuple, cmp.getKeyFieldCount()-1, cmp.getFieldCount() - cmp.getKeyFieldCount());
- if(comparison != 0) {
- throw new BTreeException("Cannot delete tuple. Byte-by-byte comparison failed to prove equality.");
- }
- }
-
- int tupleOff = slotManager.getTupleOff(slotOff);
- frameTuple.resetByOffset(buf, tupleOff);
- int tupleSize = tupleWriter.bytesRequired(frameTuple);
-
- // perform deletion (we just do a memcpy to overwrite the slot)
- int slotStartOff = slotManager.getSlotEndOff();
- int length = slotOff - slotStartOff;
- System.arraycopy(buf.array(), slotStartOff, buf.array(), slotStartOff + slotManager.getSlotSize(), length);
-
- // maintain space information
- buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + tupleSize + slotManager.getSlotSize());
- }
- }
-
- @Override
- public SpaceStatus hasSpaceInsert(ITupleReference tuple, MultiComparator cmp) {
- int bytesRequired = tupleWriter.bytesRequired(tuple);
- if(bytesRequired + slotManager.getSlotSize() <= buf.capacity() - buf.getInt(freeSpaceOff) - (buf.getInt(tupleCountOff) * slotManager.getSlotSize()) ) return SpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
- else if(bytesRequired + slotManager.getSlotSize() <= buf.getInt(totalFreeSpaceOff)) return SpaceStatus.SUFFICIENT_SPACE;
- else return SpaceStatus.INSUFFICIENT_SPACE;
- }
-
- @Override
- public SpaceStatus hasSpaceUpdate(int rid, ITupleReference tuple, MultiComparator cmp) {
- // TODO Auto-generated method stub
- return SpaceStatus.INSUFFICIENT_SPACE;
- }
+ slotManager.setSlot(sortedTupleOffs.get(i).slotOff, freeSpace);
+ freeSpace += tupleLength;
+ }
- protected void resetSpaceParams() {
- buf.putInt(freeSpaceOff, totalFreeSpaceOff + 4);
- buf.putInt(totalFreeSpaceOff, buf.capacity() - (totalFreeSpaceOff + 4));
- }
-
- @Override
- public void insert(ITupleReference tuple, MultiComparator cmp) throws Exception {
- frameTuple.setFieldCount(cmp.getFieldCount());
- int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_INCLUSIVE, FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
- slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
- int bytesWritten = tupleWriter.writeTuple(tuple, buf, buf.getInt(freeSpaceOff));
-
- buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
- buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
- }
-
- @Override
- public void update(int rid, ITupleReference tuple) throws Exception {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void printHeader() {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public int getTupleCount() {
- return buf.getInt(tupleCountOff);
- }
+ buf.putInt(freeSpaceOff, freeSpace);
+ buf.putInt(totalFreeSpaceOff, buf.capacity() - freeSpace - tupleCount * slotManager.getSlotSize());
+ }
- public ISlotManager getSlotManager() {
- return slotManager;
- }
-
- @Override
- public String printKeys(MultiComparator cmp, ISerializerDeserializer[] fields) throws HyracksDataException {
- StringBuilder strBuilder = new StringBuilder();
- int tupleCount = buf.getInt(tupleCountOff);
- frameTuple.setFieldCount(fields.length);
- for(int i = 0; i < tupleCount; i++) {
- frameTuple.resetByTupleIndex(this, i);
- for(int j = 0; j < cmp.getKeyFieldCount(); j++) {
- ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(j), frameTuple.getFieldStart(j), frameTuple.getFieldLength(j));
- DataInput dataIn = new DataInputStream(inStream);
- Object o = fields[j].deserialize(dataIn);
- strBuilder.append(o.toString() + " ");
- }
- strBuilder.append(" | ");
- }
- strBuilder.append("\n");
- return strBuilder.toString();
- }
-
- @Override
- public int getTupleOffset(int slotNum) {
- return slotManager.getTupleOff(slotManager.getSlotStartOff() - slotNum * slotManager.getSlotSize());
- }
+ @Override
+ public void delete(ITupleReference tuple, MultiComparator cmp, boolean exactDelete) throws Exception {
+ frameTuple.setFieldCount(cmp.getFieldCount());
+ int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_EXACT,
+ FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
+ int slotOff = slotManager.getSlotOff(tupleIndex);
+ if (tupleIndex < 0) {
+ throw new BTreeException("Key to be deleted does not exist.");
+ } else {
+ if (exactDelete) {
+ // check the non-key columns for equality by byte-by-byte
+ // comparison
+ int tupleOff = slotManager.getTupleOff(slotOff);
+ frameTuple.resetByOffset(buf, tupleOff);
- @Override
- public int getPageLsn() {
- return buf.getInt(pageLsnOff);
- }
+ int comparison = cmp.fieldRangeCompare(tuple, frameTuple, cmp.getKeyFieldCount() - 1, cmp
+ .getFieldCount()
+ - cmp.getKeyFieldCount());
+ if (comparison != 0) {
+ throw new BTreeException("Cannot delete tuple. Byte-by-byte comparison failed to prove equality.");
+ }
+ }
- @Override
- public void setPageLsn(int pageLsn) {
- buf.putInt(pageLsnOff, pageLsn);
- }
+ int tupleOff = slotManager.getTupleOff(slotOff);
+ frameTuple.resetByOffset(buf, tupleOff);
+ int tupleSize = tupleWriter.bytesRequired(frameTuple);
- @Override
- public int getTotalFreeSpace() {
- return buf.getInt(totalFreeSpaceOff);
- }
-
+ // perform deletion (we just do a memcpy to overwrite the slot)
+ int slotStartOff = slotManager.getSlotEndOff();
+ int length = slotOff - slotStartOff;
+ System.arraycopy(buf.array(), slotStartOff, buf.array(), slotStartOff + slotManager.getSlotSize(), length);
+
+ // maintain space information
+ buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
+ buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + tupleSize + slotManager.getSlotSize());
+ }
+ }
+
+ @Override
+ public SpaceStatus hasSpaceInsert(ITupleReference tuple, MultiComparator cmp) {
+ int bytesRequired = tupleWriter.bytesRequired(tuple);
+ if (bytesRequired + slotManager.getSlotSize() <= buf.capacity() - buf.getInt(freeSpaceOff)
+ - (buf.getInt(tupleCountOff) * slotManager.getSlotSize()))
+ return SpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
+ else if (bytesRequired + slotManager.getSlotSize() <= buf.getInt(totalFreeSpaceOff))
+ return SpaceStatus.SUFFICIENT_SPACE;
+ else
+ return SpaceStatus.INSUFFICIENT_SPACE;
+ }
+
+ @Override
+ public SpaceStatus hasSpaceUpdate(int rid, ITupleReference tuple, MultiComparator cmp) {
+ // TODO Auto-generated method stub
+ return SpaceStatus.INSUFFICIENT_SPACE;
+ }
+
+ protected void resetSpaceParams() {
+ buf.putInt(freeSpaceOff, totalFreeSpaceOff + 4);
+ buf.putInt(totalFreeSpaceOff, buf.capacity() - (totalFreeSpaceOff + 4));
+ }
+
+ @Override
+ public void insert(ITupleReference tuple, MultiComparator cmp) throws Exception {
+ frameTuple.setFieldCount(cmp.getFieldCount());
+ int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_INCLUSIVE,
+ FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
+ slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
+ int bytesWritten = tupleWriter.writeTuple(tuple, buf, buf.getInt(freeSpaceOff));
+
+ buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
+ buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
+ buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
+ }
+
+ @Override
+ public void update(int rid, ITupleReference tuple) throws Exception {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public void printHeader() {
+ // TODO Auto-generated method stub
+
+ }
+
+ @Override
+ public int getTupleCount() {
+ return buf.getInt(tupleCountOff);
+ }
+
+ public ISlotManager getSlotManager() {
+ return slotManager;
+ }
+
+ @Override
+ public String printKeys(MultiComparator cmp, ISerializerDeserializer[] fields) throws HyracksDataException {
+ StringBuilder strBuilder = new StringBuilder();
+ int tupleCount = buf.getInt(tupleCountOff);
+ frameTuple.setFieldCount(fields.length);
+ for (int i = 0; i < tupleCount; i++) {
+ frameTuple.resetByTupleIndex(this, i);
+ for (int j = 0; j < cmp.getKeyFieldCount(); j++) {
+ ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(j), frameTuple
+ .getFieldStart(j), frameTuple.getFieldLength(j));
+ DataInput dataIn = new DataInputStream(inStream);
+ Object o = fields[j].deserialize(dataIn);
+ strBuilder.append(o.toString() + " ");
+ }
+ strBuilder.append(" | ");
+ }
+ strBuilder.append("\n");
+ return strBuilder.toString();
+ }
+
+ @Override
+ public int getTupleOffset(int slotNum) {
+ return slotManager.getTupleOff(slotManager.getSlotStartOff() - slotNum * slotManager.getSlotSize());
+ }
+
+ @Override
+ public int getPageLsn() {
+ return buf.getInt(pageLsnOff);
+ }
+
+ @Override
+ public void setPageLsn(int pageLsn) {
+ buf.putInt(pageLsnOff, pageLsn);
+ }
+
+ @Override
+ public int getTotalFreeSpace() {
+ return buf.getInt(totalFreeSpaceOff);
+ }
+
@Override
public boolean compress(MultiComparator cmp) {
return false;
}
-
+
@Override
public int getSlotSize() {
- return slotManager.getSlotSize();
+ return slotManager.getSlotSize();
}
-
+
@Override
- public void setPageTupleFieldCount(int fieldCount) {
- frameTuple.setFieldCount(fieldCount);
- }
-
+ public void setPageTupleFieldCount(int fieldCount) {
+ frameTuple.setFieldCount(fieldCount);
+ }
+
public IBTreeTupleWriter getTupleWriter() {
- return tupleWriter;
+ return tupleWriter;
}
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMInteriorFrame.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMInteriorFrame.java
index b34a3df..90d400d4 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMInteriorFrame.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMInteriorFrame.java
@@ -39,380 +39,412 @@
import edu.uci.ics.hyracks.storage.am.btree.impls.SplitKey;
public class NSMInteriorFrame extends NSMFrame implements IBTreeInteriorFrame {
-
- private static final int rightLeafOff = smFlagOff + 1;
-
- private static final int childPtrSize = 4;
-
- //private SimpleTupleReference cmpFrameTuple = new SimpleTupleReference();
- private IBTreeTupleReference cmpFrameTuple;
-
- public NSMInteriorFrame(IBTreeTupleWriter tupleWriter) {
- super(tupleWriter);
- cmpFrameTuple = tupleWriter.createTupleReference();
-
- }
-
- private int getLeftChildPageOff(ITupleReference tuple, MultiComparator cmp) {
- return tuple.getFieldStart(cmp.getKeyFieldCount()-1) + tuple.getFieldLength(cmp.getKeyFieldCount()-1);
- }
-
- @Override
- public void initBuffer(byte level) {
- super.initBuffer(level);
- buf.putInt(rightLeafOff, -1);
- }
-
- @Override
- public SpaceStatus hasSpaceInsert(ITupleReference tuple, MultiComparator cmp) {
- int bytesRequired = tupleWriter.bytesRequired(tuple) + 8; // for the two childpointers
- if(bytesRequired + slotManager.getSlotSize() <= buf.capacity() - buf.getInt(freeSpaceOff) - (buf.getInt(tupleCountOff) * slotManager.getSlotSize()) ) return SpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
- else if(bytesRequired + slotManager.getSlotSize() <= buf.getInt(totalFreeSpaceOff)) return SpaceStatus.SUFFICIENT_SPACE;
- else return SpaceStatus.INSUFFICIENT_SPACE;
- }
-
- @Override
- public void insert(ITupleReference tuple, MultiComparator cmp) throws Exception {
- frameTuple.setFieldCount(cmp.getKeyFieldCount());
- int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_INCLUSIVE, FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
- int slotOff = slotManager.getSlotOff(tupleIndex);
- boolean isDuplicate = true;
-
- if(tupleIndex < 0) isDuplicate = false; // greater than all existing keys
- else {
- frameTuple.resetByOffset(buf, slotManager.getTupleOff(slotOff));
- if(cmp.compare(tuple, frameTuple) != 0) isDuplicate = false;
- }
-
- if(isDuplicate) {
- throw new BTreeException("Trying to insert duplicate value into interior node.");
- }
- else {
- slotOff = slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
-
- int freeSpace = buf.getInt(freeSpaceOff);
- int bytesWritten = tupleWriter.writeTupleFields(tuple, 0, cmp.getKeyFieldCount(), buf, freeSpace);
- System.arraycopy(tuple.getFieldData(cmp.getKeyFieldCount()-1), getLeftChildPageOff(tuple, cmp), buf.array(), freeSpace + bytesWritten, childPtrSize);
- int tupleSize = bytesWritten + childPtrSize;
-
- buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
- buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + tupleSize);
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - tupleSize - slotManager.getSlotSize());
-
- // did insert into the rightmost slot?
- if(slotOff == slotManager.getSlotEndOff()) {
- System.arraycopy(tuple.getFieldData(cmp.getKeyFieldCount()-1), getLeftChildPageOff(tuple, cmp) + childPtrSize, buf.array(), rightLeafOff, childPtrSize);
- }
- else {
- // if slotOff has a right (slot-)neighbor then update its child pointer
- // the only time when this is NOT the case, is when this is the first tuple
- // (or when the splitkey goes into the rightmost slot but that case was handled in the if above)
- if(buf.getInt(tupleCountOff) > 1) {
- int rightNeighborOff = slotOff - slotManager.getSlotSize();
- frameTuple.resetByOffset(buf, slotManager.getTupleOff(rightNeighborOff));
- System.arraycopy(tuple.getFieldData(0), getLeftChildPageOff(tuple, cmp) + childPtrSize, buf.array(), getLeftChildPageOff(frameTuple, cmp), childPtrSize);
- }
- }
- }
- }
-
- @Override
- public void insertSorted(ITupleReference tuple, MultiComparator cmp) throws Exception {
- int freeSpace = buf.getInt(freeSpaceOff);
- slotManager.insertSlot(-1, freeSpace);
- int bytesWritten = tupleWriter.writeTupleFields(tuple, 0, cmp.getKeyFieldCount(), buf, freeSpace);
- System.arraycopy(tuple.getFieldData(cmp.getKeyFieldCount()-1), getLeftChildPageOff(tuple, cmp), buf.array(), freeSpace + bytesWritten, childPtrSize);
- int tupleSize = bytesWritten + childPtrSize;
- buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
- buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + tupleSize);
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - tupleSize - slotManager.getSlotSize());
- System.arraycopy(tuple.getFieldData(0), getLeftChildPageOff(tuple, cmp) + childPtrSize, buf.array(), rightLeafOff, childPtrSize);
- }
-
- @Override
- public int split(IBTreeFrame rightFrame, ITupleReference tuple, MultiComparator cmp, SplitKey splitKey) throws Exception {
- // before doing anything check if key already exists
- frameTuple.setFieldCount(cmp.getKeyFieldCount());
- int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_EXACT, FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
- int slotOff = slotManager.getSlotOff(tupleIndex);
- if(tupleIndex >= 0) {
- frameTuple.resetByOffset(buf, slotManager.getTupleOff(slotOff));
- if(cmp.compare(tuple, frameTuple) == 0) {
- throw new BTreeException("Inserting duplicate key in interior node during split");
- }
- }
-
- ByteBuffer right = rightFrame.getBuffer();
- int tupleCount = buf.getInt(tupleCountOff);
-
- int tuplesToLeft = (tupleCount / 2) + (tupleCount % 2);
- IBTreeFrame targetFrame = null;
- frameTuple.resetByOffset(buf, getTupleOffset(tuplesToLeft-1));
- if(cmp.compare(tuple, frameTuple) <= 0) {
- targetFrame = this;
- }
- else {
- targetFrame = rightFrame;
- }
- int tuplesToRight = tupleCount - tuplesToLeft;
-
- // copy entire page
- System.arraycopy(buf.array(), 0, right.array(), 0, buf.capacity());
-
- // on right page we need to copy rightmost slots to left
- int src = rightFrame.getSlotManager().getSlotEndOff();
- int dest = rightFrame.getSlotManager().getSlotEndOff() + tuplesToLeft * rightFrame.getSlotManager().getSlotSize();
- int length = rightFrame.getSlotManager().getSlotSize() * tuplesToRight;
- System.arraycopy(right.array(), src, right.array(), dest, length);
- right.putInt(tupleCountOff, tuplesToRight);
-
- // on left page, remove highest key and make its childpointer the rightmost childpointer
- buf.putInt(tupleCountOff, tuplesToLeft);
-
- // copy data to be inserted, we need this because creating the splitkey will overwrite the data param (data points to same memory as splitKey.getData())
- SplitKey savedSplitKey = splitKey.duplicate(tupleWriter.createTupleReference());
-
- // set split key to be highest value in left page
- int tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
- frameTuple.resetByOffset(buf, tupleOff);
- int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
- splitKey.initData(splitKeySize);
- tupleWriter.writeTupleFields(frameTuple, 0, cmp.getKeyFieldCount(), splitKey.getBuffer(), 0);
- splitKey.getTuple().resetByOffset(splitKey.getBuffer(), 0);
-
- int deleteTupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
- frameTuple.resetByOffset(buf, deleteTupleOff);
- buf.putInt(rightLeafOff, buf.getInt(getLeftChildPageOff(frameTuple, cmp)));
- buf.putInt(tupleCountOff, tuplesToLeft - 1);
-
- // compact both pages
- rightFrame.compact(cmp);
- compact(cmp);
-
- // insert key
- targetFrame.insert(savedSplitKey.getTuple(), cmp);
-
- return 0;
- }
-
- @Override
- public void compact(MultiComparator cmp) {
- resetSpaceParams();
-
- frameTuple.setFieldCount(cmp.getKeyFieldCount());
-
- int tupleCount = buf.getInt(tupleCountOff);
- int freeSpace = buf.getInt(freeSpaceOff);
-
- ArrayList<SlotOffTupleOff> sortedTupleOffs = new ArrayList<SlotOffTupleOff>();
- sortedTupleOffs.ensureCapacity(tupleCount);
- for(int i = 0; i < tupleCount; i++) {
- int slotOff = slotManager.getSlotOff(i);
- int tupleOff = slotManager.getTupleOff(slotOff);
- sortedTupleOffs.add(new SlotOffTupleOff(i, slotOff, tupleOff));
- }
- Collections.sort(sortedTupleOffs);
-
- for(int i = 0; i < sortedTupleOffs.size(); i++) {
- int tupleOff = sortedTupleOffs.get(i).tupleOff;
- frameTuple.resetByOffset(buf, tupleOff);
-
- int tupleEndOff = frameTuple.getFieldStart(frameTuple.getFieldCount()-1) + frameTuple.getFieldLength(frameTuple.getFieldCount()-1);
- int tupleLength = tupleEndOff - tupleOff + childPtrSize;
- System.arraycopy(buf.array(), tupleOff, buf.array(), freeSpace, tupleLength);
-
- slotManager.setSlot(sortedTupleOffs.get(i).slotOff, freeSpace);
- freeSpace += tupleLength;
- }
-
- buf.putInt(freeSpaceOff, freeSpace);
- buf.putInt(totalFreeSpaceOff, buf.capacity() - freeSpace - tupleCount * slotManager.getSlotSize());
- }
-
- @Override
- public int getChildPageId(RangePredicate pred, MultiComparator srcCmp) {
- // check for trivial case where there is only a child pointer (and no key)
- if(buf.getInt(tupleCountOff) == 0) {
- return buf.getInt(rightLeafOff);
- }
-
- cmpFrameTuple.setFieldCount(srcCmp.getKeyFieldCount());
- frameTuple.setFieldCount(srcCmp.getKeyFieldCount());
-
- // check for trivial cases where no low key or high key exists (e.g. during an index scan)
- ITupleReference tuple = null;
- FindTupleMode fsm = null;
- MultiComparator targetCmp = null;
- if(pred.isForward()) {
- tuple = pred.getLowKey();
- if(tuple == null) {
- return getLeftmostChildPageId(srcCmp);
- }
- if(pred.isLowKeyInclusive()) fsm = FindTupleMode.FTM_INCLUSIVE;
- else fsm = FindTupleMode.FTM_EXCLUSIVE;
- targetCmp = pred.getLowKeyComparator();
- }
- else {
- tuple = pred.getHighKey();
- if(tuple == null) {
- return getRightmostChildPageId(srcCmp);
- }
- if(pred.isHighKeyInclusive()) fsm = FindTupleMode.FTM_EXCLUSIVE;
- else fsm = FindTupleMode.FTM_INCLUSIVE;
- targetCmp = pred.getHighKeyComparator();
- }
-
- int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, targetCmp, fsm, FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
- int slotOff = slotManager.getSlotOff(tupleIndex);
- if(tupleIndex < 0) {
- return buf.getInt(rightLeafOff);
- }
- else {
- int origTupleOff = slotManager.getTupleOff(slotOff);
- cmpFrameTuple.resetByOffset(buf, origTupleOff);
- int cmpTupleOff = origTupleOff;
- if(pred.isForward()) {
- int maxSlotOff = buf.capacity();
- slotOff += slotManager.getSlotSize();
- while(slotOff < maxSlotOff) {
- cmpTupleOff = slotManager.getTupleOff(slotOff);
- frameTuple.resetByOffset(buf, cmpTupleOff);
- if(targetCmp.compare(cmpFrameTuple, frameTuple) != 0) break;
- slotOff += slotManager.getSlotSize();
- }
- slotOff -= slotManager.getSlotSize();
- }
- else {
- int minSlotOff = slotManager.getSlotEndOff() - slotManager.getSlotSize();
- slotOff -= slotManager.getSlotSize();
- while(slotOff > minSlotOff) {
- cmpTupleOff = slotManager.getTupleOff(slotOff);
- frameTuple.resetByOffset(buf, cmpTupleOff);
- if(targetCmp.compare(cmpFrameTuple, frameTuple) != 0) break;
- slotOff -= slotManager.getSlotSize();
- }
- slotOff += slotManager.getSlotSize();
- }
-
- frameTuple.resetByOffset(buf, slotManager.getTupleOff(slotOff));
- int childPageOff = getLeftChildPageOff(frameTuple, srcCmp);
- return buf.getInt(childPageOff);
- }
- }
-
- @Override
- public void delete(ITupleReference tuple, MultiComparator cmp, boolean exactDelete) throws Exception {
- frameTuple.setFieldCount(cmp.getKeyFieldCount());
- int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_INCLUSIVE, FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
- int slotOff = slotManager.getSlotOff(tupleIndex);
- int tupleOff;
- int keySize;
-
- if(tupleIndex < 0) {
- tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
- frameTuple.resetByOffset(buf, tupleOff);
- keySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
-
- // copy new rightmost pointer
- System.arraycopy(buf.array(), tupleOff + keySize, buf.array(), rightLeafOff, childPtrSize);
- }
- else {
- tupleOff = slotManager.getTupleOff(slotOff);
- frameTuple.resetByOffset(buf, tupleOff);
- keySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
- // perform deletion (we just do a memcpy to overwrite the slot)
- int slotStartOff = slotManager.getSlotEndOff();
- int length = slotOff - slotStartOff;
- System.arraycopy(buf.array(), slotStartOff, buf.array(), slotStartOff + slotManager.getSlotSize(), length);
- }
-
- // maintain space information
- buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + keySize + childPtrSize + slotManager.getSlotSize());
- }
-
- @Override
- protected void resetSpaceParams() {
- buf.putInt(freeSpaceOff, rightLeafOff + childPtrSize);
- buf.putInt(totalFreeSpaceOff, buf.capacity() - (rightLeafOff + childPtrSize));
- }
-
- @Override
- public int getLeftmostChildPageId(MultiComparator cmp) {
- int tupleOff = slotManager.getTupleOff(slotManager.getSlotStartOff());
- frameTuple.setFieldCount(cmp.getKeyFieldCount());
- frameTuple.resetByOffset(buf, tupleOff);
- int childPageOff = getLeftChildPageOff(frameTuple, cmp);
- return buf.getInt(childPageOff);
- }
- @Override
- public int getRightmostChildPageId(MultiComparator cmp) {
- return buf.getInt(rightLeafOff);
- }
+ private static final int rightLeafOff = smFlagOff + 1;
- @Override
- public void setRightmostChildPageId(int pageId) {
- buf.putInt(rightLeafOff, pageId);
- }
-
- // for debugging
- public ArrayList<Integer> getChildren(MultiComparator cmp) {
- ArrayList<Integer> ret = new ArrayList<Integer>();
- frameTuple.setFieldCount(cmp.getKeyFieldCount());
- int tupleCount = buf.getInt(tupleCountOff);
- for(int i = 0; i < tupleCount; i++) {
- int tupleOff = slotManager.getTupleOff(slotManager.getSlotOff(i));
- frameTuple.resetByOffset(buf, tupleOff);
- int intVal = getInt(buf.array(), frameTuple.getFieldStart(frameTuple.getFieldCount()-1) + frameTuple.getFieldLength(frameTuple.getFieldCount()-1));
- ret.add(intVal);
- }
- if(!isLeaf()) {
- int rightLeaf = buf.getInt(rightLeafOff);
- if(rightLeaf > 0) ret.add(buf.getInt(rightLeafOff));
- }
- return ret;
- }
+ private static final int childPtrSize = 4;
- @Override
- public void deleteGreatest(MultiComparator cmp) {
- int slotOff = slotManager.getSlotEndOff();
- int tupleOff = slotManager.getTupleOff(slotOff);
- frameTuple.setFieldCount(cmp.getKeyFieldCount());
- frameTuple.resetByOffset(buf, tupleOff);
- int keySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
- System.arraycopy(buf.array(), tupleOff + keySize, buf.array(), rightLeafOff, childPtrSize);
-
- // maintain space information
- buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + keySize + childPtrSize + slotManager.getSlotSize());
-
- int freeSpace = buf.getInt(freeSpaceOff);
- if(freeSpace == tupleOff + keySize + childPtrSize) {
- buf.putInt(freeSpace, freeSpace - (keySize + childPtrSize));
- }
- }
-
- private int getInt(byte[] bytes, int offset) {
- return ((bytes[offset] & 0xff) << 24) + ((bytes[offset + 1] & 0xff) << 16) + ((bytes[offset + 2] & 0xff) << 8) + ((bytes[offset + 3] & 0xff) << 0);
- }
-
- @Override
- public String printKeys(MultiComparator cmp, ISerializerDeserializer[] fields) throws HyracksDataException {
- StringBuilder strBuilder = new StringBuilder();
- int tupleCount = buf.getInt(tupleCountOff);
- frameTuple.setFieldCount(cmp.getKeyFieldCount());
- for(int i = 0; i < tupleCount; i++) {
- frameTuple.resetByTupleIndex(this, i);
- for(int j = 0; j < cmp.getKeyFieldCount(); j++) {
- ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(j), frameTuple.getFieldStart(j), frameTuple.getFieldLength(j));
- DataInput dataIn = new DataInputStream(inStream);
- Object o = fields[j].deserialize(dataIn);
- strBuilder.append(o.toString() + " ");
- }
- strBuilder.append(" | ");
- }
- strBuilder.append("\n");
- return strBuilder.toString();
- }
+ // private SimpleTupleReference cmpFrameTuple = new SimpleTupleReference();
+ private IBTreeTupleReference cmpFrameTuple;
+
+ public NSMInteriorFrame(IBTreeTupleWriter tupleWriter) {
+ super(tupleWriter);
+ cmpFrameTuple = tupleWriter.createTupleReference();
+
+ }
+
+ private int getLeftChildPageOff(ITupleReference tuple, MultiComparator cmp) {
+ return tuple.getFieldStart(cmp.getKeyFieldCount() - 1) + tuple.getFieldLength(cmp.getKeyFieldCount() - 1);
+ }
+
+ @Override
+ public void initBuffer(byte level) {
+ super.initBuffer(level);
+ buf.putInt(rightLeafOff, -1);
+ }
+
+ @Override
+ public SpaceStatus hasSpaceInsert(ITupleReference tuple, MultiComparator cmp) {
+ int bytesRequired = tupleWriter.bytesRequired(tuple) + 8; // for the two
+ // childpointers
+ if (bytesRequired + slotManager.getSlotSize() <= buf.capacity() - buf.getInt(freeSpaceOff)
+ - (buf.getInt(tupleCountOff) * slotManager.getSlotSize()))
+ return SpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
+ else if (bytesRequired + slotManager.getSlotSize() <= buf.getInt(totalFreeSpaceOff))
+ return SpaceStatus.SUFFICIENT_SPACE;
+ else
+ return SpaceStatus.INSUFFICIENT_SPACE;
+ }
+
+ @Override
+ public void insert(ITupleReference tuple, MultiComparator cmp) throws Exception {
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
+ int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_INCLUSIVE,
+ FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
+ int slotOff = slotManager.getSlotOff(tupleIndex);
+ boolean isDuplicate = true;
+
+ if (tupleIndex < 0)
+ isDuplicate = false; // greater than all existing keys
+ else {
+ frameTuple.resetByOffset(buf, slotManager.getTupleOff(slotOff));
+ if (cmp.compare(tuple, frameTuple) != 0)
+ isDuplicate = false;
+ }
+
+ if (isDuplicate) {
+ throw new BTreeException("Trying to insert duplicate value into interior node.");
+ } else {
+ slotOff = slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
+
+ int freeSpace = buf.getInt(freeSpaceOff);
+ int bytesWritten = tupleWriter.writeTupleFields(tuple, 0, cmp.getKeyFieldCount(), buf, freeSpace);
+ System.arraycopy(tuple.getFieldData(cmp.getKeyFieldCount() - 1), getLeftChildPageOff(tuple, cmp), buf
+ .array(), freeSpace + bytesWritten, childPtrSize);
+ int tupleSize = bytesWritten + childPtrSize;
+
+ buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
+ buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + tupleSize);
+ buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - tupleSize - slotManager.getSlotSize());
+
+ // did insert into the rightmost slot?
+ if (slotOff == slotManager.getSlotEndOff()) {
+ System.arraycopy(tuple.getFieldData(cmp.getKeyFieldCount() - 1), getLeftChildPageOff(tuple, cmp)
+ + childPtrSize, buf.array(), rightLeafOff, childPtrSize);
+ } else {
+ // if slotOff has a right (slot-)neighbor then update its child
+ // pointer
+ // the only time when this is NOT the case, is when this is the
+ // first tuple
+ // (or when the splitkey goes into the rightmost slot but that
+ // case was handled in the if above)
+ if (buf.getInt(tupleCountOff) > 1) {
+ int rightNeighborOff = slotOff - slotManager.getSlotSize();
+ frameTuple.resetByOffset(buf, slotManager.getTupleOff(rightNeighborOff));
+ System.arraycopy(tuple.getFieldData(0), getLeftChildPageOff(tuple, cmp) + childPtrSize,
+ buf.array(), getLeftChildPageOff(frameTuple, cmp), childPtrSize);
+ }
+ }
+ }
+ }
+
+ @Override
+ public void insertSorted(ITupleReference tuple, MultiComparator cmp) throws HyracksDataException {
+ int freeSpace = buf.getInt(freeSpaceOff);
+ slotManager.insertSlot(-1, freeSpace);
+ int bytesWritten = tupleWriter.writeTupleFields(tuple, 0, cmp.getKeyFieldCount(), buf, freeSpace);
+ System.arraycopy(tuple.getFieldData(cmp.getKeyFieldCount() - 1), getLeftChildPageOff(tuple, cmp), buf.array(),
+ freeSpace + bytesWritten, childPtrSize);
+ int tupleSize = bytesWritten + childPtrSize;
+ buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
+ buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + tupleSize);
+ buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - tupleSize - slotManager.getSlotSize());
+ System.arraycopy(tuple.getFieldData(0), getLeftChildPageOff(tuple, cmp) + childPtrSize, buf.array(),
+ rightLeafOff, childPtrSize);
+ }
+
+ @Override
+ public int split(IBTreeFrame rightFrame, ITupleReference tuple, MultiComparator cmp, SplitKey splitKey)
+ throws Exception {
+ // before doing anything check if key already exists
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
+ int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_EXACT,
+ FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
+ int slotOff = slotManager.getSlotOff(tupleIndex);
+ if (tupleIndex >= 0) {
+ frameTuple.resetByOffset(buf, slotManager.getTupleOff(slotOff));
+ if (cmp.compare(tuple, frameTuple) == 0) {
+ throw new BTreeException("Inserting duplicate key in interior node during split");
+ }
+ }
+
+ ByteBuffer right = rightFrame.getBuffer();
+ int tupleCount = buf.getInt(tupleCountOff);
+
+ int tuplesToLeft = (tupleCount / 2) + (tupleCount % 2);
+ IBTreeFrame targetFrame = null;
+ frameTuple.resetByOffset(buf, getTupleOffset(tuplesToLeft - 1));
+ if (cmp.compare(tuple, frameTuple) <= 0) {
+ targetFrame = this;
+ } else {
+ targetFrame = rightFrame;
+ }
+ int tuplesToRight = tupleCount - tuplesToLeft;
+
+ // copy entire page
+ System.arraycopy(buf.array(), 0, right.array(), 0, buf.capacity());
+
+ // on right page we need to copy rightmost slots to left
+ int src = rightFrame.getSlotManager().getSlotEndOff();
+ int dest = rightFrame.getSlotManager().getSlotEndOff() + tuplesToLeft
+ * rightFrame.getSlotManager().getSlotSize();
+ int length = rightFrame.getSlotManager().getSlotSize() * tuplesToRight;
+ System.arraycopy(right.array(), src, right.array(), dest, length);
+ right.putInt(tupleCountOff, tuplesToRight);
+
+ // on left page, remove highest key and make its childpointer the
+ // rightmost childpointer
+ buf.putInt(tupleCountOff, tuplesToLeft);
+
+ // copy data to be inserted, we need this because creating the splitkey
+ // will overwrite the data param (data points to same memory as
+ // splitKey.getData())
+ SplitKey savedSplitKey = splitKey.duplicate(tupleWriter.createTupleReference());
+
+ // set split key to be highest value in left page
+ int tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
+ frameTuple.resetByOffset(buf, tupleOff);
+ int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
+ splitKey.initData(splitKeySize);
+ tupleWriter.writeTupleFields(frameTuple, 0, cmp.getKeyFieldCount(), splitKey.getBuffer(), 0);
+ splitKey.getTuple().resetByOffset(splitKey.getBuffer(), 0);
+
+ int deleteTupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
+ frameTuple.resetByOffset(buf, deleteTupleOff);
+ buf.putInt(rightLeafOff, buf.getInt(getLeftChildPageOff(frameTuple, cmp)));
+ buf.putInt(tupleCountOff, tuplesToLeft - 1);
+
+ // compact both pages
+ rightFrame.compact(cmp);
+ compact(cmp);
+
+ // insert key
+ targetFrame.insert(savedSplitKey.getTuple(), cmp);
+
+ return 0;
+ }
+
+ @Override
+ public void compact(MultiComparator cmp) {
+ resetSpaceParams();
+
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
+
+ int tupleCount = buf.getInt(tupleCountOff);
+ int freeSpace = buf.getInt(freeSpaceOff);
+
+ ArrayList<SlotOffTupleOff> sortedTupleOffs = new ArrayList<SlotOffTupleOff>();
+ sortedTupleOffs.ensureCapacity(tupleCount);
+ for (int i = 0; i < tupleCount; i++) {
+ int slotOff = slotManager.getSlotOff(i);
+ int tupleOff = slotManager.getTupleOff(slotOff);
+ sortedTupleOffs.add(new SlotOffTupleOff(i, slotOff, tupleOff));
+ }
+ Collections.sort(sortedTupleOffs);
+
+ for (int i = 0; i < sortedTupleOffs.size(); i++) {
+ int tupleOff = sortedTupleOffs.get(i).tupleOff;
+ frameTuple.resetByOffset(buf, tupleOff);
+
+ int tupleEndOff = frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
+ + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1);
+ int tupleLength = tupleEndOff - tupleOff + childPtrSize;
+ System.arraycopy(buf.array(), tupleOff, buf.array(), freeSpace, tupleLength);
+
+ slotManager.setSlot(sortedTupleOffs.get(i).slotOff, freeSpace);
+ freeSpace += tupleLength;
+ }
+
+ buf.putInt(freeSpaceOff, freeSpace);
+ buf.putInt(totalFreeSpaceOff, buf.capacity() - freeSpace - tupleCount * slotManager.getSlotSize());
+ }
+
+ @Override
+ public int getChildPageId(RangePredicate pred, MultiComparator srcCmp) {
+ // check for trivial case where there is only a child pointer (and no
+ // key)
+ if (buf.getInt(tupleCountOff) == 0) {
+ return buf.getInt(rightLeafOff);
+ }
+
+ cmpFrameTuple.setFieldCount(srcCmp.getKeyFieldCount());
+ frameTuple.setFieldCount(srcCmp.getKeyFieldCount());
+
+ // check for trivial cases where no low key or high key exists (e.g.
+ // during an index scan)
+ ITupleReference tuple = null;
+ FindTupleMode fsm = null;
+ MultiComparator targetCmp = null;
+ if (pred.isForward()) {
+ tuple = pred.getLowKey();
+ if (tuple == null) {
+ return getLeftmostChildPageId(srcCmp);
+ }
+ if (pred.isLowKeyInclusive())
+ fsm = FindTupleMode.FTM_INCLUSIVE;
+ else
+ fsm = FindTupleMode.FTM_EXCLUSIVE;
+ targetCmp = pred.getLowKeyComparator();
+ } else {
+ tuple = pred.getHighKey();
+ if (tuple == null) {
+ return getRightmostChildPageId(srcCmp);
+ }
+ if (pred.isHighKeyInclusive())
+ fsm = FindTupleMode.FTM_EXCLUSIVE;
+ else
+ fsm = FindTupleMode.FTM_INCLUSIVE;
+ targetCmp = pred.getHighKeyComparator();
+ }
+
+ int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, targetCmp, fsm,
+ FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
+ int slotOff = slotManager.getSlotOff(tupleIndex);
+ if (tupleIndex < 0) {
+ return buf.getInt(rightLeafOff);
+ } else {
+ int origTupleOff = slotManager.getTupleOff(slotOff);
+ cmpFrameTuple.resetByOffset(buf, origTupleOff);
+ int cmpTupleOff = origTupleOff;
+ if (pred.isForward()) {
+ int maxSlotOff = buf.capacity();
+ slotOff += slotManager.getSlotSize();
+ while (slotOff < maxSlotOff) {
+ cmpTupleOff = slotManager.getTupleOff(slotOff);
+ frameTuple.resetByOffset(buf, cmpTupleOff);
+ if (targetCmp.compare(cmpFrameTuple, frameTuple) != 0)
+ break;
+ slotOff += slotManager.getSlotSize();
+ }
+ slotOff -= slotManager.getSlotSize();
+ } else {
+ int minSlotOff = slotManager.getSlotEndOff() - slotManager.getSlotSize();
+ slotOff -= slotManager.getSlotSize();
+ while (slotOff > minSlotOff) {
+ cmpTupleOff = slotManager.getTupleOff(slotOff);
+ frameTuple.resetByOffset(buf, cmpTupleOff);
+ if (targetCmp.compare(cmpFrameTuple, frameTuple) != 0)
+ break;
+ slotOff -= slotManager.getSlotSize();
+ }
+ slotOff += slotManager.getSlotSize();
+ }
+
+ frameTuple.resetByOffset(buf, slotManager.getTupleOff(slotOff));
+ int childPageOff = getLeftChildPageOff(frameTuple, srcCmp);
+ return buf.getInt(childPageOff);
+ }
+ }
+
+ @Override
+ public void delete(ITupleReference tuple, MultiComparator cmp, boolean exactDelete) throws Exception {
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
+ int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_INCLUSIVE,
+ FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
+ int slotOff = slotManager.getSlotOff(tupleIndex);
+ int tupleOff;
+ int keySize;
+
+ if (tupleIndex < 0) {
+ tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
+ frameTuple.resetByOffset(buf, tupleOff);
+ keySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
+
+ // copy new rightmost pointer
+ System.arraycopy(buf.array(), tupleOff + keySize, buf.array(), rightLeafOff, childPtrSize);
+ } else {
+ tupleOff = slotManager.getTupleOff(slotOff);
+ frameTuple.resetByOffset(buf, tupleOff);
+ keySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
+ // perform deletion (we just do a memcpy to overwrite the slot)
+ int slotStartOff = slotManager.getSlotEndOff();
+ int length = slotOff - slotStartOff;
+ System.arraycopy(buf.array(), slotStartOff, buf.array(), slotStartOff + slotManager.getSlotSize(), length);
+ }
+
+ // maintain space information
+ buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
+ buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + keySize + childPtrSize
+ + slotManager.getSlotSize());
+ }
+
+ @Override
+ protected void resetSpaceParams() {
+ buf.putInt(freeSpaceOff, rightLeafOff + childPtrSize);
+ buf.putInt(totalFreeSpaceOff, buf.capacity() - (rightLeafOff + childPtrSize));
+ }
+
+ @Override
+ public int getLeftmostChildPageId(MultiComparator cmp) {
+ int tupleOff = slotManager.getTupleOff(slotManager.getSlotStartOff());
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
+ frameTuple.resetByOffset(buf, tupleOff);
+ int childPageOff = getLeftChildPageOff(frameTuple, cmp);
+ return buf.getInt(childPageOff);
+ }
+
+ @Override
+ public int getRightmostChildPageId(MultiComparator cmp) {
+ return buf.getInt(rightLeafOff);
+ }
+
+ @Override
+ public void setRightmostChildPageId(int pageId) {
+ buf.putInt(rightLeafOff, pageId);
+ }
+
+ // for debugging
+ public ArrayList<Integer> getChildren(MultiComparator cmp) {
+ ArrayList<Integer> ret = new ArrayList<Integer>();
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
+ int tupleCount = buf.getInt(tupleCountOff);
+ for (int i = 0; i < tupleCount; i++) {
+ int tupleOff = slotManager.getTupleOff(slotManager.getSlotOff(i));
+ frameTuple.resetByOffset(buf, tupleOff);
+ int intVal = getInt(buf.array(), frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
+ + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1));
+ ret.add(intVal);
+ }
+ if (!isLeaf()) {
+ int rightLeaf = buf.getInt(rightLeafOff);
+ if (rightLeaf > 0)
+ ret.add(buf.getInt(rightLeafOff));
+ }
+ return ret;
+ }
+
+ @Override
+ public void deleteGreatest(MultiComparator cmp) {
+ int slotOff = slotManager.getSlotEndOff();
+ int tupleOff = slotManager.getTupleOff(slotOff);
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
+ frameTuple.resetByOffset(buf, tupleOff);
+ int keySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
+ System.arraycopy(buf.array(), tupleOff + keySize, buf.array(), rightLeafOff, childPtrSize);
+
+ // maintain space information
+ buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
+ buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + keySize + childPtrSize
+ + slotManager.getSlotSize());
+
+ int freeSpace = buf.getInt(freeSpaceOff);
+ if (freeSpace == tupleOff + keySize + childPtrSize) {
+ buf.putInt(freeSpace, freeSpace - (keySize + childPtrSize));
+ }
+ }
+
+ private int getInt(byte[] bytes, int offset) {
+ return ((bytes[offset] & 0xff) << 24) + ((bytes[offset + 1] & 0xff) << 16) + ((bytes[offset + 2] & 0xff) << 8)
+ + ((bytes[offset + 3] & 0xff) << 0);
+ }
+
+ @Override
+ public String printKeys(MultiComparator cmp, ISerializerDeserializer[] fields) throws HyracksDataException {
+ StringBuilder strBuilder = new StringBuilder();
+ int tupleCount = buf.getInt(tupleCountOff);
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
+ for (int i = 0; i < tupleCount; i++) {
+ frameTuple.resetByTupleIndex(this, i);
+ for (int j = 0; j < cmp.getKeyFieldCount(); j++) {
+ ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(j), frameTuple
+ .getFieldStart(j), frameTuple.getFieldLength(j));
+ DataInput dataIn = new DataInputStream(inStream);
+ Object o = fields[j].deserialize(dataIn);
+ strBuilder.append(o.toString() + " ");
+ }
+ strBuilder.append(" | ");
+ }
+ strBuilder.append("\n");
+ return strBuilder.toString();
+ }
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMInteriorFrameFactory.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMInteriorFrameFactory.java
index 8dd7175..86fa94f 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMInteriorFrameFactory.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMInteriorFrameFactory.java
@@ -20,16 +20,16 @@
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleWriterFactory;
public class NSMInteriorFrameFactory implements IBTreeInteriorFrameFactory {
-
- private static final long serialVersionUID = 1L;
- private IBTreeTupleWriterFactory tupleWriterFactory;
-
- public NSMInteriorFrameFactory(IBTreeTupleWriterFactory tupleWriterFactory) {
- this.tupleWriterFactory = tupleWriterFactory;
- }
-
- @Override
- public IBTreeInteriorFrame getFrame() {
- return new NSMInteriorFrame(tupleWriterFactory.createTupleWriter());
- }
+
+ private static final long serialVersionUID = 1L;
+ private IBTreeTupleWriterFactory tupleWriterFactory;
+
+ public NSMInteriorFrameFactory(IBTreeTupleWriterFactory tupleWriterFactory) {
+ this.tupleWriterFactory = tupleWriterFactory;
+ }
+
+ // Each call produces a brand-new interior frame backed by its own tuple
+ // writer, so frames handed out here share no mutable state.
+ @Override
+ public IBTreeInteriorFrame getFrame() {
+ return new NSMInteriorFrame(tupleWriterFactory.createTupleWriter());
+ }
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMLeafFrame.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMLeafFrame.java
index 2dd6bc0..fd8f826 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMLeafFrame.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMLeafFrame.java
@@ -17,6 +17,7 @@
import java.nio.ByteBuffer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeFrame;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
@@ -28,155 +29,161 @@
import edu.uci.ics.hyracks.storage.am.btree.impls.MultiComparator;
import edu.uci.ics.hyracks.storage.am.btree.impls.SplitKey;
-public class NSMLeafFrame extends NSMFrame implements IBTreeLeafFrame {
- protected static final int prevLeafOff = smFlagOff + 1;
- protected static final int nextLeafOff = prevLeafOff + 4;
-
- public NSMLeafFrame(IBTreeTupleWriter tupleWriter) {
- super(tupleWriter);
- }
-
- @Override
- public void initBuffer(byte level) {
- super.initBuffer(level);
- buf.putInt(prevLeafOff, -1);
- buf.putInt(nextLeafOff, -1);
- }
+public class NSMLeafFrame extends NSMFrame implements IBTreeLeafFrame {
+ protected static final int prevLeafOff = smFlagOff + 1;
+ protected static final int nextLeafOff = prevLeafOff + 4;
- @Override
- public void setNextLeaf(int page) {
- buf.putInt(nextLeafOff, page);
- }
+ // Delegates all base NSM page handling to NSMFrame; this subclass only
+ // adds the prev/next leaf sibling pointers.
+ public NSMLeafFrame(IBTreeTupleWriter tupleWriter) {
+ super(tupleWriter);
+ }
- @Override
- public void setPrevLeaf(int page) {
- buf.putInt(prevLeafOff, page);
- }
+ // Formats a fresh leaf page: base header via super.initBuffer, then marks
+ // both sibling-leaf pointers as -1 (no neighbors yet).
+ @Override
+ public void initBuffer(byte level) {
+ super.initBuffer(level);
+ buf.putInt(prevLeafOff, -1);
+ buf.putInt(nextLeafOff, -1);
+ }
- @Override
- public int getNextLeaf() {
- return buf.getInt(nextLeafOff);
- }
+ // Stores the page id of the right sibling leaf in the page header.
+ @Override
+ public void setNextLeaf(int page) {
+ buf.putInt(nextLeafOff, page);
+ }
- @Override
- public int getPrevLeaf() {
- return buf.getInt(prevLeafOff);
- }
+ // Stores the page id of the left sibling leaf in the page header.
+ @Override
+ public void setPrevLeaf(int page) {
+ buf.putInt(prevLeafOff, page);
+ }
- @Override
- public void insert(ITupleReference tuple, MultiComparator cmp) throws Exception {
- frameTuple.setFieldCount(cmp.getFieldCount());
- int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_INCLUSIVE, FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
- int slotOff = slotManager.getSlotOff(tupleIndex);
- boolean isDuplicate = true;
-
- if (tupleIndex < 0) isDuplicate = false; // greater than all existing keys
- else {
- frameTuple.resetByOffset(buf, slotManager.getTupleOff(slotOff));
- if (cmp.compare(tuple, frameTuple) != 0) isDuplicate = false;
- }
-
- if (isDuplicate) {
- throw new BTreeException("Trying to insert duplicate value into leaf of unique index");
- }
- else {
- slotOff = slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
-
- int freeSpace = buf.getInt(freeSpaceOff);
- int bytesWritten = tupleWriter.writeTuple(tuple, buf, freeSpace);
-
- buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
- buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
- }
- }
-
- @Override
- public void insertSorted(ITupleReference tuple, MultiComparator cmp) throws Exception {
- int freeSpace = buf.getInt(freeSpaceOff);
- slotManager.insertSlot(-1, freeSpace);
- int bytesWritten = tupleWriter.writeTuple(tuple, buf, freeSpace);
- buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
- buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
- }
-
- @Override
- public int split(IBTreeFrame rightFrame, ITupleReference tuple, MultiComparator cmp, SplitKey splitKey) throws Exception {
-
- frameTuple.setFieldCount(cmp.getFieldCount());
-
- // before doing anything check if key already exists
- int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_EXACT, FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
- if (tupleIndex >= 0) {
- frameTuple.resetByTupleIndex(this, tupleIndex);
- if (cmp.compare(tuple, frameTuple) == 0) {
- throw new BTreeException("Inserting duplicate key into unique index");
- }
- }
-
- ByteBuffer right = rightFrame.getBuffer();
- int tupleCount = getTupleCount();
+ // Returns the right sibling's page id, or -1 if this is the rightmost leaf
+ // (see initBuffer).
+ @Override
+ public int getNextLeaf() {
+ return buf.getInt(nextLeafOff);
+ }
- int tuplesToLeft;
- int mid = tupleCount / 2;
- IBTreeFrame targetFrame = null;
- int tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff() + slotManager.getSlotSize() * mid);
- frameTuple.resetByOffset(buf, tupleOff);
- if (cmp.compare(tuple, frameTuple) >= 0) {
- tuplesToLeft = mid + (tupleCount % 2);
- targetFrame = rightFrame;
- } else {
- tuplesToLeft = mid;
- targetFrame = this;
- }
- int tuplesToRight = tupleCount - tuplesToLeft;
-
- // copy entire page
- System.arraycopy(buf.array(), 0, right.array(), 0, buf.capacity());
-
- // on right page we need to copy rightmost slots to left
- int src = rightFrame.getSlotManager().getSlotEndOff();
- int dest = rightFrame.getSlotManager().getSlotEndOff() + tuplesToLeft * rightFrame.getSlotManager().getSlotSize();
- int length = rightFrame.getSlotManager().getSlotSize() * tuplesToRight;
- System.arraycopy(right.array(), src, right.array(), dest, length);
- right.putInt(tupleCountOff, tuplesToRight);
-
- // on left page only change the tupleCount indicator
- buf.putInt(tupleCountOff, tuplesToLeft);
-
- // compact both pages
- rightFrame.compact(cmp);
- compact(cmp);
-
- // insert last key
- targetFrame.insert(tuple, cmp);
-
- // set split key to be highest value in left page
- tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
- frameTuple.resetByOffset(buf, tupleOff);
-
- int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
- splitKey.initData(splitKeySize);
- tupleWriter.writeTupleFields(frameTuple, 0, cmp.getKeyFieldCount(), splitKey.getBuffer(), 0);
- splitKey.getTuple().resetByOffset(splitKey.getBuffer(), 0);
-
- return 0;
- }
+ // Returns the left sibling's page id, or -1 if this is the leftmost leaf
+ // (see initBuffer).
+ @Override
+ public int getPrevLeaf() {
+ return buf.getInt(prevLeafOff);
+ }
- @Override
- protected void resetSpaceParams() {
- buf.putInt(freeSpaceOff, nextLeafOff + 4);
- buf.putInt(totalFreeSpaceOff, buf.capacity() - (nextLeafOff + 4));
- }
-
- @Override
- public IBTreeTupleReference createTupleReference() {
- return tupleWriter.createTupleReference();
- }
-
- @Override
- public int findTupleIndex(ITupleReference searchKey, IBTreeTupleReference pageTuple, MultiComparator cmp, FindTupleMode ftm, FindTupleNoExactMatchPolicy ftp) {
- return slotManager.findTupleIndex(searchKey, pageTuple, cmp, ftm, ftp);
- }
+ // Inserts 'tuple' into this leaf at its key-ordered slot position.
+ // Rejects an exact key match with BTreeException (unique index), then
+ // writes the tuple into contiguous free space and updates the page
+ // header: tuple count, free-space pointer, and total free space
+ // (tuple bytes plus one slot). Caller must have verified space via
+ // hasSpaceInsert.
+ @Override
+ public void insert(ITupleReference tuple, MultiComparator cmp) throws Exception {
+ frameTuple.setFieldCount(cmp.getFieldCount());
+ int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_INCLUSIVE,
+ FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
+ int slotOff = slotManager.getSlotOff(tupleIndex);
+ boolean isDuplicate = true;
+
+ if (tupleIndex < 0)
+ isDuplicate = false; // greater than all existing keys
+ else {
+ // Landed on an existing slot: only a true duplicate if keys compare equal.
+ frameTuple.resetByOffset(buf, slotManager.getTupleOff(slotOff));
+ if (cmp.compare(tuple, frameTuple) != 0)
+ isDuplicate = false;
+ }
+
+ if (isDuplicate) {
+ throw new BTreeException("Trying to insert duplicate value into leaf of unique index");
+ } else {
+ slotOff = slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
+
+ int freeSpace = buf.getInt(freeSpaceOff);
+ int bytesWritten = tupleWriter.writeTuple(tuple, buf, freeSpace);
+
+ // Maintain page-header bookkeeping for the new tuple and its slot.
+ buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
+ buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
+ buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
+ }
+ }
+
+ // Appends 'tuple' at the end of the page without searching for a slot
+ // position (tupleIndex -1 = append) — callers must supply tuples in
+ // sorted order, e.g. during bulk load. Unlike insert(), no duplicate
+ // check is performed here.
+ @Override
+ public void insertSorted(ITupleReference tuple, MultiComparator cmp) throws HyracksDataException {
+ int freeSpace = buf.getInt(freeSpaceOff);
+ slotManager.insertSlot(-1, freeSpace);
+ int bytesWritten = tupleWriter.writeTuple(tuple, buf, freeSpace);
+ buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
+ buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
+ buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
+ }
+
+ // Splits this full leaf roughly in half: this page keeps the left half,
+ // 'rightFrame' receives the right half, and the pending 'tuple' is then
+ // inserted into whichever side it sorts into. On return, 'splitKey' holds
+ // the highest key remaining on the left page so the caller can post it to
+ // the parent. Throws BTreeException if the key already exists (unique
+ // index). Always returns 0.
+ @Override
+ public int split(IBTreeFrame rightFrame, ITupleReference tuple, MultiComparator cmp, SplitKey splitKey)
+ throws Exception {
+
+ frameTuple.setFieldCount(cmp.getFieldCount());
+
+ // before doing anything check if key already exists
+ int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_EXACT,
+ FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
+ if (tupleIndex >= 0) {
+ frameTuple.resetByTupleIndex(this, tupleIndex);
+ if (cmp.compare(tuple, frameTuple) == 0) {
+ throw new BTreeException("Inserting duplicate key into unique index");
+ }
+ }
+
+ ByteBuffer right = rightFrame.getBuffer();
+ int tupleCount = getTupleCount();
+
+ // Decide the split point: if the new tuple sorts at or above the middle
+ // tuple it will go right, so leave the left side slightly larger.
+ int tuplesToLeft;
+ int mid = tupleCount / 2;
+ IBTreeFrame targetFrame = null;
+ int tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff() + slotManager.getSlotSize() * mid);
+ frameTuple.resetByOffset(buf, tupleOff);
+ if (cmp.compare(tuple, frameTuple) >= 0) {
+ tuplesToLeft = mid + (tupleCount % 2);
+ targetFrame = rightFrame;
+ } else {
+ tuplesToLeft = mid;
+ targetFrame = this;
+ }
+ int tuplesToRight = tupleCount - tuplesToLeft;
+
+ // copy entire page
+ System.arraycopy(buf.array(), 0, right.array(), 0, buf.capacity());
+
+ // on right page we need to copy rightmost slots to left
+ int src = rightFrame.getSlotManager().getSlotEndOff();
+ int dest = rightFrame.getSlotManager().getSlotEndOff() + tuplesToLeft
+ * rightFrame.getSlotManager().getSlotSize();
+ int length = rightFrame.getSlotManager().getSlotSize() * tuplesToRight;
+ System.arraycopy(right.array(), src, right.array(), dest, length);
+ right.putInt(tupleCountOff, tuplesToRight);
+
+ // on left page only change the tupleCount indicator
+ buf.putInt(tupleCountOff, tuplesToLeft);
+
+ // compact both pages to reclaim the space of tuples no longer owned
+ rightFrame.compact(cmp);
+ compact(cmp);
+
+ // insert last key
+ targetFrame.insert(tuple, cmp);
+
+ // set split key to be highest value in left page
+ tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
+ frameTuple.resetByOffset(buf, tupleOff);
+
+ int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
+ splitKey.initData(splitKeySize);
+ tupleWriter.writeTupleFields(frameTuple, 0, cmp.getKeyFieldCount(), splitKey.getBuffer(), 0);
+ splitKey.getTuple().resetByOffset(splitKey.getBuffer(), 0);
+
+ return 0;
+ }
+
+ // Leaf pages reserve header space through the 4-byte nextLeaf pointer;
+ // usable free space begins immediately after it.
+ @Override
+ protected void resetSpaceParams() {
+ buf.putInt(freeSpaceOff, nextLeafOff + 4);
+ buf.putInt(totalFreeSpaceOff, buf.capacity() - (nextLeafOff + 4));
+ }
+
+ // Factory for tuple references compatible with this frame's tuple layout.
+ @Override
+ public IBTreeTupleReference createTupleReference() {
+ return tupleWriter.createTupleReference();
+ }
+
+ // Thin delegation to the slot manager's binary search over this page's slots.
+ @Override
+ public int findTupleIndex(ITupleReference searchKey, IBTreeTupleReference pageTuple, MultiComparator cmp,
+ FindTupleMode ftm, FindTupleNoExactMatchPolicy ftp) {
+ return slotManager.findTupleIndex(searchKey, pageTuple, cmp, ftm, ftp);
+ }
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMLeafFrameFactory.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMLeafFrameFactory.java
index 33316a9..202f31a 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMLeafFrameFactory.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMLeafFrameFactory.java
@@ -20,16 +20,16 @@
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleWriterFactory;
public class NSMLeafFrameFactory implements IBTreeLeafFrameFactory {
-
- private static final long serialVersionUID = 1L;
- private IBTreeTupleWriterFactory tupleWriterFactory;
-
- public NSMLeafFrameFactory(IBTreeTupleWriterFactory tupleWriterFactory) {
- this.tupleWriterFactory = tupleWriterFactory;
- }
-
- @Override
- public IBTreeLeafFrame getFrame() {
- return new NSMLeafFrame(tupleWriterFactory.createTupleWriter());
- }
+
+ private static final long serialVersionUID = 1L;
+ private IBTreeTupleWriterFactory tupleWriterFactory;
+
+ public NSMLeafFrameFactory(IBTreeTupleWriterFactory tupleWriterFactory) {
+ this.tupleWriterFactory = tupleWriterFactory;
+ }
+
+ // Each call produces a brand-new leaf frame backed by its own tuple
+ // writer, so frames handed out here share no mutable state.
+ @Override
+ public IBTreeLeafFrame getFrame() {
+ return new NSMLeafFrame(tupleWriterFactory.createTupleWriter());
+ }
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java
index fc73571..0c47e12 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java
@@ -22,8 +22,6 @@
import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeCursor;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeFrame;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
@@ -35,27 +33,28 @@
import edu.uci.ics.hyracks.storage.am.btree.frames.NSMInteriorFrame;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-import edu.uci.ics.hyracks.storage.common.file.FileInfo;
+import edu.uci.ics.hyracks.storage.common.file.FileHandle;
public class BTree {
-
+
private final static int RESTART_OP = Integer.MIN_VALUE;
private final static int MAX_RESTARTS = 10;
-
- private final int metaDataPage = 0; // page containing meta data, e.g., maxPage
+
+ private final int metaDataPage = 0; // page containing meta data, e.g.,
+ // maxPage
private final int rootPage = 1; // the root page never changes
-
+
private boolean created = false;
private boolean loaded = false;
-
+
private final IBufferCache bufferCache;
private int fileId;
private final IBTreeInteriorFrameFactory interiorFrameFactory;
- private final IBTreeLeafFrameFactory leafFrameFactory;
+ private final IBTreeLeafFrameFactory leafFrameFactory;
private final MultiComparator cmp;
- private final ReadWriteLock treeLatch;
+ private final ReadWriteLock treeLatch;
private final RangePredicate diskOrderScanPredicate;
-
+
public int rootSplits = 0;
public int[] splitsByLevel = new int[500];
public long readLatchesAcquired = 0;
@@ -72,12 +71,11 @@
public int usefulCompression = 0;
public int uselessCompression = 0;
-
-
+
public void treeLatchStatus() {
- System.out.println(treeLatch.writeLock().toString());
+ System.out.println(treeLatch.writeLock().toString());
}
-
+
public String printStats() {
StringBuilder strBuilder = new StringBuilder();
strBuilder.append("\n");
@@ -96,61 +94,62 @@
IBTreeLeafFrameFactory leafFrameFactory, MultiComparator cmp) {
this.bufferCache = bufferCache;
this.interiorFrameFactory = interiorFrameFactory;
- this.leafFrameFactory = leafFrameFactory;
+ this.leafFrameFactory = leafFrameFactory;
this.cmp = cmp;
this.treeLatch = new ReentrantReadWriteLock(true);
- this.diskOrderScanPredicate = new RangePredicate(true, null, null, true, true, cmp, cmp);
+ this.diskOrderScanPredicate = new RangePredicate(true, null, null, true, true, cmp, cmp);
}
-
+
public void create(int fileId, IBTreeLeafFrame leafFrame, IBTreeMetaDataFrame metaFrame) throws Exception {
- if(created) return;
-
- treeLatch.writeLock().lock();
- try {
-
- // check if another thread beat us to it
- if(created) return;
-
- // initialize meta data page
- ICachedPage metaNode = bufferCache.pin(FileInfo.getDiskPageId(fileId, metaDataPage), false);
- pins++;
-
- metaNode.acquireWriteLatch();
- writeLatchesAcquired++;
- try {
- metaFrame.setPage(metaNode);
- metaFrame.initBuffer((byte) -1);
- metaFrame.setMaxPage(rootPage);
- } finally {
- metaNode.releaseWriteLatch();
- writeLatchesReleased++;
- bufferCache.unpin(metaNode);
- unpins++;
- }
+ // Fast path: already created by this instance.
+ // NOTE(review): 'created' is a non-volatile flag read without
+ // synchronization here; the race appears benign since the second check
+ // below runs under the tree write latch — confirm this is intentional.
+ if (created)
+ return;
- // initialize root page
- ICachedPage rootNode = bufferCache.pin(FileInfo.getDiskPageId(fileId, rootPage), true);
- pins++;
+ treeLatch.writeLock().lock();
+ try {
- rootNode.acquireWriteLatch();
- writeLatchesAcquired++;
- try {
- leafFrame.setPage(rootNode);
- leafFrame.initBuffer((byte) 0);
- } finally {
- rootNode.releaseWriteLatch();
- writeLatchesReleased++;
- bufferCache.unpin(rootNode);
- unpins++;
- }
- currentLevel = 0;
-
- created = true;
- }
- finally {
- treeLatch.writeLock().unlock();
- }
+ // check if another thread beat us to it
+ if (created)
+ return;
+
+ // initialize meta data page
+ ICachedPage metaNode = bufferCache.pin(FileHandle.getDiskPageId(fileId, metaDataPage), false);
+ pins++;
+
+ metaNode.acquireWriteLatch();
+ writeLatchesAcquired++;
+ try {
+ metaFrame.setPage(metaNode);
+ metaFrame.initBuffer((byte) -1);
+ metaFrame.setMaxPage(rootPage);
+ } finally {
+ metaNode.releaseWriteLatch();
+ writeLatchesReleased++;
+ bufferCache.unpin(metaNode);
+ unpins++;
+ }
+
+ // initialize root page (level 0 = leaf until the first root split)
+ ICachedPage rootNode = bufferCache.pin(FileHandle.getDiskPageId(fileId, rootPage), true);
+ pins++;
+
+ rootNode.acquireWriteLatch();
+ writeLatchesAcquired++;
+ try {
+ leafFrame.setPage(rootNode);
+ leafFrame.initBuffer((byte) 0);
+ } finally {
+ rootNode.releaseWriteLatch();
+ writeLatchesReleased++;
+ bufferCache.unpin(rootNode);
+ unpins++;
+ }
+ currentLevel = 0;
+
+ created = true;
+ } finally {
+ treeLatch.writeLock().unlock();
+ }
}
public void open(int fileId) {
@@ -161,13 +160,13 @@
fileId = -1;
}
- private int getFreePage(IBTreeMetaDataFrame metaFrame) throws Exception {
- ICachedPage metaNode = bufferCache.pin(FileInfo.getDiskPageId(fileId, metaDataPage), false);
+ private int getFreePage(IBTreeMetaDataFrame metaFrame) throws HyracksDataException {
+ ICachedPage metaNode = bufferCache.pin(FileHandle.getDiskPageId(fileId, metaDataPage), false);
pins++;
-
+
metaNode.acquireWriteLatch();
writeLatchesAcquired++;
-
+
int freePage = -1;
try {
metaFrame.setPage(metaNode);
@@ -175,9 +174,9 @@
if (freePage < 0) { // no free page entry on this page
int nextPage = metaFrame.getNextPage();
if (nextPage > 0) { // sibling may have free pages
- ICachedPage nextNode = bufferCache.pin(FileInfo.getDiskPageId(fileId, nextPage), false);
+ ICachedPage nextNode = bufferCache.pin(FileHandle.getDiskPageId(fileId, nextPage), false);
pins++;
-
+
nextNode.acquireWriteLatch();
writeLatchesAcquired++;
// we copy over the free space entries of nextpage into the
@@ -191,8 +190,9 @@
// copy entire page (including sibling pointer, free
// page entries, and all other info)
// after this copy nextPage is considered a free page
- System.arraycopy(nextNode.getBuffer().array(), 0, metaNode.getBuffer().array(), 0, nextNode.getBuffer().capacity());
-
+ System.arraycopy(nextNode.getBuffer().array(), 0, metaNode.getBuffer().array(), 0, nextNode
+ .getBuffer().capacity());
+
// reset unchanged entry
metaFrame.setMaxPage(maxPage);
@@ -225,9 +225,9 @@
return freePage;
}
-
+
private void addFreePages(BTreeOpContext ctx) throws Exception {
- for(int i = 0; i < ctx.freePages.size(); i++) {
+ for (int i = 0; i < ctx.freePages.size(); i++) {
addFreePage(ctx.metaFrame, ctx.freePages.get(i));
}
ctx.freePages.clear();
@@ -235,167 +235,168 @@
private void addFreePage(IBTreeMetaDataFrame metaFrame, int freePage) throws Exception {
// root page is special, don't add it to free pages
- if (freePage == rootPage) return;
-
- ICachedPage metaNode = bufferCache.pin(FileInfo.getDiskPageId(fileId, metaDataPage), false);
+ if (freePage == rootPage)
+ return;
+
+ ICachedPage metaNode = bufferCache.pin(FileHandle.getDiskPageId(fileId, metaDataPage), false);
pins++;
-
+
metaNode.acquireWriteLatch();
writeLatchesAcquired++;
-
+
metaFrame.setPage(metaNode);
try {
- if(metaFrame.hasSpace()) {
- metaFrame.addFreePage(freePage);
- }
- else {
+ if (metaFrame.hasSpace()) {
+ metaFrame.addFreePage(freePage);
+ } else {
// allocate a new page in the chain of meta pages
int newPage = metaFrame.getFreePage();
- if(newPage < 0) {
+ if (newPage < 0) {
throw new Exception("Inconsistent Meta Page State. It has no space, but it also has no entries.");
}
-
- ICachedPage newNode = bufferCache.pin(FileInfo.getDiskPageId(fileId, newPage), false);
+
+ ICachedPage newNode = bufferCache.pin(FileHandle.getDiskPageId(fileId, newPage), false);
pins++;
-
+
newNode.acquireWriteLatch();
writeLatchesAcquired++;
-
-
- try {
+
+ try {
int metaMaxPage = metaFrame.getMaxPage();
// copy metaDataPage to newNode
- System.arraycopy(metaNode.getBuffer().array(), 0, newNode.getBuffer().array(), 0, metaNode.getBuffer().capacity());
-
+ System.arraycopy(metaNode.getBuffer().array(), 0, newNode.getBuffer().array(), 0, metaNode
+ .getBuffer().capacity());
+
metaFrame.initBuffer(-1);
metaFrame.setNextPage(newPage);
metaFrame.setMaxPage(metaMaxPage);
- metaFrame.addFreePage(freePage);
+ metaFrame.addFreePage(freePage);
} finally {
newNode.releaseWriteLatch();
writeLatchesReleased++;
-
+
bufferCache.unpin(newNode);
unpins++;
}
- }
+ }
} catch (Exception e) {
e.printStackTrace();
} finally {
metaNode.releaseWriteLatch();
writeLatchesReleased++;
-
+
bufferCache.unpin(metaNode);
unpins++;
}
}
-
+
public int getMaxPage(IBTreeMetaDataFrame metaFrame) throws HyracksDataException {
- ICachedPage metaNode = bufferCache.pin(FileInfo.getDiskPageId(fileId, metaDataPage), false);
- pins++;
-
- metaNode.acquireWriteLatch();
- writeLatchesAcquired++;
- int maxPage = -1;
- try {
- metaFrame.setPage(metaNode);
- maxPage = metaFrame.getMaxPage();
- } finally {
- metaNode.releaseWriteLatch();
- writeLatchesReleased++;
- bufferCache.unpin(metaNode);
- unpins++;
- }
-
- return maxPage;
+ // Reads maxPage off the meta-data page.
+ // NOTE(review): acquires a WRITE latch for a pure read — a read latch
+ // would appear sufficient; confirm whether exclusivity is required here.
+ ICachedPage metaNode = bufferCache.pin(FileHandle.getDiskPageId(fileId, metaDataPage), false);
+ pins++;
+
+ metaNode.acquireWriteLatch();
+ writeLatchesAcquired++;
+ int maxPage = -1;
+ try {
+ metaFrame.setPage(metaNode);
+ maxPage = metaFrame.getMaxPage();
+ } finally {
+ metaNode.releaseWriteLatch();
+ writeLatchesReleased++;
+ bufferCache.unpin(metaNode);
+ unpins++;
+ }
+
+ return maxPage;
}
-
- public void printTree(IBTreeLeafFrame leafFrame, IBTreeInteriorFrame interiorFrame, ISerializerDeserializer[] fields) throws Exception {
+
+ // Debug helper: prints the whole tree, starting at the root page.
+ public void printTree(IBTreeLeafFrame leafFrame, IBTreeInteriorFrame interiorFrame, ISerializerDeserializer[] fields)
+ throws Exception {
printTree(rootPage, null, false, leafFrame, interiorFrame, fields);
}
public void printTree(int pageId, ICachedPage parent, boolean unpin, IBTreeLeafFrame leafFrame,
IBTreeInteriorFrame interiorFrame, ISerializerDeserializer[] fields) throws Exception {
-
- ICachedPage node = bufferCache.pin(FileInfo.getDiskPageId(fileId, pageId), false);
+
+ // Recursive debug print of the subtree rooted at pageId, read-latching
+ // pages as it descends. 'unpin' releases the parent once its last child
+ // is being visited.
+ ICachedPage node = bufferCache.pin(FileHandle.getDiskPageId(fileId, pageId), false);
pins++;
node.acquireReadLatch();
readLatchesAcquired++;
-
+
try {
if (parent != null && unpin == true) {
parent.releaseReadLatch();
readLatchesReleased++;
-
+
bufferCache.unpin(parent);
unpins++;
}
interiorFrame.setPage(node);
int level = interiorFrame.getLevel();
-
+
System.out.format("%1d ", level);
System.out.format("%3d ", pageId);
for (int i = 0; i < currentLevel - level; i++)
- System.out.format(" ");
+ System.out.format(" ");
String keyString;
- if(interiorFrame.isLeaf()) {
- leafFrame.setPage(node);
- keyString = leafFrame.printKeys(cmp, fields);
+ if (interiorFrame.isLeaf()) {
+ leafFrame.setPage(node);
+ keyString = leafFrame.printKeys(cmp, fields);
+ } else {
+ keyString = interiorFrame.printKeys(cmp, fields);
}
- else {
- keyString = interiorFrame.printKeys(cmp, fields);
- }
-
+
+ // NOTE(review): keyString is used as the FORMAT argument of
+ // System.out.format — keys containing '%' would garble output or
+ // throw; System.out.print(keyString) would be safer. Debug-only path.
System.out.format(keyString);
if (!interiorFrame.isLeaf()) {
- ArrayList<Integer> children = ((NSMInteriorFrame) (interiorFrame)).getChildren(cmp);
-
+ ArrayList<Integer> children = ((NSMInteriorFrame) (interiorFrame)).getChildren(cmp);
+
for (int i = 0; i < children.size(); i++) {
printTree(children.get(i), node, i == children.size() - 1, leafFrame, interiorFrame, fields);
}
} else {
node.releaseReadLatch();
readLatchesReleased++;
-
+
bufferCache.unpin(node);
unpins++;
}
} catch (Exception e) {
node.releaseReadLatch();
readLatchesReleased++;
-
+
bufferCache.unpin(node);
unpins++;
e.printStackTrace();
}
}
- public void diskOrderScan(DiskOrderScanCursor cursor, IBTreeLeafFrame leafFrame, IBTreeMetaDataFrame metaFrame) throws Exception {
+ public void diskOrderScan(DiskOrderScanCursor cursor, IBTreeLeafFrame leafFrame, IBTreeMetaDataFrame metaFrame)
+ throws HyracksDataException {
int currentPageId = rootPage + 1;
int maxPageId = -1;
-
- ICachedPage metaNode = bufferCache.pin(FileInfo.getDiskPageId(fileId, metaDataPage), false);
+
+ ICachedPage metaNode = bufferCache.pin(FileHandle.getDiskPageId(fileId, metaDataPage), false);
pins++;
-
+
metaNode.acquireReadLatch();
readLatchesAcquired++;
-
+
try {
metaFrame.setPage(metaNode);
maxPageId = metaFrame.getMaxPage();
} finally {
metaNode.releaseReadLatch();
readLatchesAcquired++;
-
+
bufferCache.unpin(metaNode);
unpins++;
}
-
- ICachedPage page = bufferCache.pin(FileInfo.getDiskPageId(fileId, currentPageId), false);
+
+ ICachedPage page = bufferCache.pin(FileHandle.getDiskPageId(fileId, currentPageId), false);
page.acquireReadLatch();
cursor.setBufferCache(bufferCache);
cursor.setFileId(fileId);
@@ -403,17 +404,17 @@
cursor.setMaxPageId(maxPageId);
cursor.open(page, diskOrderScanPredicate);
}
-
- public void search(IBTreeCursor cursor, RangePredicate pred, BTreeOpContext ctx) throws Exception {
- ctx.reset();
- ctx.pred = pred;
- ctx.cursor = cursor;
+
+ public void search(IBTreeCursor cursor, RangePredicate pred, BTreeOpContext ctx) throws Exception {
+ ctx.reset();
+ ctx.pred = pred;
+ ctx.cursor = cursor;
// simple index scan
if (ctx.pred.getLowKeyComparator() == null)
ctx.pred.setLowKeyComparator(cmp);
if (ctx.pred.getHighKeyComparator() == null)
ctx.pred.setHighKeyComparator(cmp);
-
+
boolean repeatOp = true;
// we use this loop to deal with possibly multiple operation restarts
// due to ongoing structure modifications during the descent
@@ -434,13 +435,14 @@
cursor.setFileId(fileId);
}
- private void unsetSmPages(BTreeOpContext ctx) throws Exception {
+ private void unsetSmPages(BTreeOpContext ctx) throws HyracksDataException {
ICachedPage originalPage = ctx.interiorFrame.getPage();
- for(int i = 0; i < ctx.smPages.size(); i++) {
+ for (int i = 0; i < ctx.smPages.size(); i++) {
int pageId = ctx.smPages.get(i);
- ICachedPage smPage = bufferCache.pin(FileInfo.getDiskPageId(fileId, pageId), false);
+ ICachedPage smPage = bufferCache.pin(FileHandle.getDiskPageId(fileId, pageId), false);
pins++;
- smPage.acquireWriteLatch(); // TODO: would like to set page dirty without latching
+ smPage.acquireWriteLatch(); // TODO: would like to set page dirty
+ // without latching
writeLatchesAcquired++;
try {
ctx.interiorFrame.setPage(smPage);
@@ -466,20 +468,25 @@
currentLevel++;
// make sure the root is always at the same level
- ICachedPage leftNode = bufferCache.pin(FileInfo.getDiskPageId(fileId, ctx.splitKey.getLeftPage()), false);
+ ICachedPage leftNode = bufferCache.pin(FileHandle.getDiskPageId(fileId, ctx.splitKey.getLeftPage()), false);
pins++;
- leftNode.acquireWriteLatch(); // TODO: think about whether latching is really required
+ leftNode.acquireWriteLatch(); // TODO: think about whether latching is
+ // really required
writeLatchesAcquired++;
try {
- ICachedPage rightNode = bufferCache.pin(FileInfo.getDiskPageId(fileId, ctx.splitKey.getRightPage()), false);
+ ICachedPage rightNode = bufferCache.pin(FileHandle.getDiskPageId(fileId, ctx.splitKey.getRightPage()),
+ false);
pins++;
- rightNode.acquireWriteLatch(); // TODO: think about whether latching is really required
+ rightNode.acquireWriteLatch(); // TODO: think about whether latching
+ // is really required
writeLatchesAcquired++;
try {
int newLeftId = getFreePage(ctx.metaFrame);
- ICachedPage newLeftNode = bufferCache.pin(FileInfo.getDiskPageId(fileId, newLeftId), true);
+ ICachedPage newLeftNode = bufferCache.pin(FileHandle.getDiskPageId(fileId, newLeftId), true);
pins++;
- newLeftNode.acquireWriteLatch(); // TODO: think about whether latching is really required
+ newLeftNode.acquireWriteLatch(); // TODO: think about whether
+ // latching is really
+ // required
writeLatchesAcquired++;
try {
// copy left child to new left child
@@ -521,15 +528,15 @@
}
}
- public void insert(ITupleReference tuple, BTreeOpContext ctx) throws Exception {
- ctx.reset();
- ctx.pred.setLowKeyComparator(cmp);
- ctx.pred.setHighKeyComparator(cmp);
+ public void insert(ITupleReference tuple, BTreeOpContext ctx) throws Exception {
+ ctx.reset();
+ ctx.pred.setLowKeyComparator(cmp);
+ ctx.pred.setHighKeyComparator(cmp);
ctx.pred.setLowKey(tuple, true);
ctx.pred.setHighKey(tuple, true);
ctx.splitKey.reset();
ctx.splitKey.getTuple().setFieldCount(cmp.getKeyFieldCount());
-
+
boolean repeatOp = true;
// we use this loop to deal with possibly multiple operation restarts
// due to ongoing structure modifications during the descent
@@ -553,142 +560,148 @@
repeatOp = false;
}
}
-
+
public long uselessCompressionTime = 0;
-
+
private void insertLeaf(ICachedPage node, int pageId, ITupleReference tuple, BTreeOpContext ctx) throws Exception {
- ctx.leafFrame.setPage(node);
- ctx.leafFrame.setPageTupleFieldCount(cmp.getFieldCount());
- SpaceStatus spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple, cmp);
- switch (spaceStatus) {
-
- case SUFFICIENT_CONTIGUOUS_SPACE: {
- //System.out.println("SUFFICIENT_CONTIGUOUS_SPACE");
- ctx.leafFrame.insert(tuple, cmp);
- ctx.splitKey.reset();
- } break;
+ ctx.leafFrame.setPage(node);
+ ctx.leafFrame.setPageTupleFieldCount(cmp.getFieldCount());
+ SpaceStatus spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple, cmp);
+ switch (spaceStatus) {
- case SUFFICIENT_SPACE: {
- //System.out.println("SUFFICIENT_SPACE");
- ctx.leafFrame.compact(cmp);
- ctx.leafFrame.insert(tuple, cmp);
- ctx.splitKey.reset();
- } break;
+ case SUFFICIENT_CONTIGUOUS_SPACE: {
+ // System.out.println("SUFFICIENT_CONTIGUOUS_SPACE");
+ ctx.leafFrame.insert(tuple, cmp);
+ ctx.splitKey.reset();
+ }
+ break;
- case INSUFFICIENT_SPACE: {
- //System.out.println("INSUFFICIENT_SPACE");
-
- // try compressing the page first and see if there is space available
- long start = System.currentTimeMillis();
- boolean reCompressed = ctx.leafFrame.compress(cmp);
- long end = System.currentTimeMillis();
- if(reCompressed) spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple, cmp);
-
- if(spaceStatus == SpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE) {
- ctx.leafFrame.insert(tuple, cmp);
- ctx.splitKey.reset();
-
- usefulCompression++;
- }
- else {
-
- uselessCompressionTime += (end - start);
- uselessCompression++;
-
- // perform split
- splitsByLevel[0]++; // debug
- int rightSiblingPageId = ctx.leafFrame.getNextLeaf();
- ICachedPage rightSibling = null;
- if (rightSiblingPageId > 0) {
- rightSibling = bufferCache.pin(FileInfo.getDiskPageId(fileId, rightSiblingPageId), false);
- pins++;
- }
+ case SUFFICIENT_SPACE: {
+ // System.out.println("SUFFICIENT_SPACE");
+ ctx.leafFrame.compact(cmp);
+ ctx.leafFrame.insert(tuple, cmp);
+ ctx.splitKey.reset();
+ }
+ break;
- treeLatch.writeLock().lock(); // lock is released in
- // unsetSmPages(), after sm has
- // fully completed
- treeLatchesAcquired++;
- try {
+ case INSUFFICIENT_SPACE: {
+ // System.out.println("INSUFFICIENT_SPACE");
- int rightPageId = getFreePage(ctx.metaFrame);
- ICachedPage rightNode = bufferCache.pin(FileInfo.getDiskPageId(fileId, rightPageId), true);
- pins++;
- rightNode.acquireWriteLatch();
- writeLatchesAcquired++;
- try {
- IBTreeLeafFrame rightFrame = leafFrameFactory.getFrame();
- rightFrame.setPage(rightNode);
- rightFrame.initBuffer((byte) 0);
- rightFrame.setPageTupleFieldCount(cmp.getFieldCount());
-
- int ret = ctx.leafFrame.split(rightFrame, tuple, cmp, ctx.splitKey);
+ // try compressing the page first and see if there is space
+ // available
+ long start = System.currentTimeMillis();
+ boolean reCompressed = ctx.leafFrame.compress(cmp);
+ long end = System.currentTimeMillis();
+ if (reCompressed)
+ spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple, cmp);
- ctx.smPages.add(pageId);
- ctx.smPages.add(rightPageId);
- ctx.leafFrame.setSmFlag(true);
- rightFrame.setSmFlag(true);
+ if (spaceStatus == SpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE) {
+ ctx.leafFrame.insert(tuple, cmp);
+ ctx.splitKey.reset();
- rightFrame.setNextLeaf(ctx.leafFrame.getNextLeaf());
- rightFrame.setPrevLeaf(pageId);
- ctx.leafFrame.setNextLeaf(rightPageId);
+ usefulCompression++;
+ } else {
- // TODO: we just use increasing numbers as pageLsn, we
- // should tie this together with the LogManager and
- // TransactionManager
- rightFrame.setPageLsn(rightFrame.getPageLsn() + 1);
- ctx.leafFrame.setPageLsn(ctx.leafFrame.getPageLsn() + 1);
+ uselessCompressionTime += (end - start);
+ uselessCompression++;
- if (ret != 0) {
- ctx.splitKey.reset();
- } else {
- // System.out.print("LEAF SPLITKEY: ");
- // cmp.printKey(splitKey.getData(), 0);
- // System.out.println("");
+ // perform split
+ splitsByLevel[0]++; // debug
+ int rightSiblingPageId = ctx.leafFrame.getNextLeaf();
+ ICachedPage rightSibling = null;
+ if (rightSiblingPageId > 0) {
+ rightSibling = bufferCache.pin(FileHandle.getDiskPageId(fileId, rightSiblingPageId), false);
+ pins++;
+ }
- ctx.splitKey.setPages(pageId, rightPageId);
- }
- if (rightSibling != null) {
- rightSibling.acquireWriteLatch();
- writeLatchesAcquired++;
- try {
- rightFrame.setPage(rightSibling); // reuse
- // rightFrame
- // for
- // modification
- rightFrame.setPrevLeaf(rightPageId);
- } finally {
- rightSibling.releaseWriteLatch();
- writeLatchesReleased++;
- }
- }
- } finally {
- rightNode.releaseWriteLatch();
- writeLatchesReleased++;
- bufferCache.unpin(rightNode);
- unpins++;
- }
- } catch (Exception e) {
- treeLatch.writeLock().unlock();
- treeLatchesReleased++;
- throw e;
- } finally {
- if (rightSibling != null) {
- bufferCache.unpin(rightSibling);
- unpins++;
- }
- }
- }
- } break;
-
- }
+ treeLatch.writeLock().lock(); // lock is released in
+ // unsetSmPages(), after sm has
+ // fully completed
+ treeLatchesAcquired++;
+ try {
- node.releaseWriteLatch();
- writeLatchesReleased++;
- bufferCache.unpin(node);
- unpins++;
+ int rightPageId = getFreePage(ctx.metaFrame);
+ ICachedPage rightNode = bufferCache.pin(FileHandle.getDiskPageId(fileId, rightPageId), true);
+ pins++;
+ rightNode.acquireWriteLatch();
+ writeLatchesAcquired++;
+ try {
+ IBTreeLeafFrame rightFrame = leafFrameFactory.getFrame();
+ rightFrame.setPage(rightNode);
+ rightFrame.initBuffer((byte) 0);
+ rightFrame.setPageTupleFieldCount(cmp.getFieldCount());
+
+ int ret = ctx.leafFrame.split(rightFrame, tuple, cmp, ctx.splitKey);
+
+ ctx.smPages.add(pageId);
+ ctx.smPages.add(rightPageId);
+ ctx.leafFrame.setSmFlag(true);
+ rightFrame.setSmFlag(true);
+
+ rightFrame.setNextLeaf(ctx.leafFrame.getNextLeaf());
+ rightFrame.setPrevLeaf(pageId);
+ ctx.leafFrame.setNextLeaf(rightPageId);
+
+ // TODO: we just use increasing numbers as pageLsn,
+ // we
+ // should tie this together with the LogManager and
+ // TransactionManager
+ rightFrame.setPageLsn(rightFrame.getPageLsn() + 1);
+ ctx.leafFrame.setPageLsn(ctx.leafFrame.getPageLsn() + 1);
+
+ if (ret != 0) {
+ ctx.splitKey.reset();
+ } else {
+ // System.out.print("LEAF SPLITKEY: ");
+ // cmp.printKey(splitKey.getData(), 0);
+ // System.out.println("");
+
+ ctx.splitKey.setPages(pageId, rightPageId);
+ }
+ if (rightSibling != null) {
+ rightSibling.acquireWriteLatch();
+ writeLatchesAcquired++;
+ try {
+ rightFrame.setPage(rightSibling); // reuse
+ // rightFrame
+ // for
+ // modification
+ rightFrame.setPrevLeaf(rightPageId);
+ } finally {
+ rightSibling.releaseWriteLatch();
+ writeLatchesReleased++;
+ }
+ }
+ } finally {
+ rightNode.releaseWriteLatch();
+ writeLatchesReleased++;
+ bufferCache.unpin(rightNode);
+ unpins++;
+ }
+ } catch (Exception e) {
+ treeLatch.writeLock().unlock();
+ treeLatchesReleased++;
+ throw e;
+ } finally {
+ if (rightSibling != null) {
+ bufferCache.unpin(rightSibling);
+ unpins++;
+ }
+ }
+ }
+ }
+ break;
+
+ }
+
+ node.releaseWriteLatch();
+ writeLatchesReleased++;
+ bufferCache.unpin(node);
+ unpins++;
}
- private void insertInterior(ICachedPage node, int pageId, ITupleReference tuple, BTreeOpContext ctx) throws Exception {
+ private void insertInterior(ICachedPage node, int pageId, ITupleReference tuple, BTreeOpContext ctx)
+ throws Exception {
ctx.interiorFrame.setPage(node);
ctx.interiorFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
SpaceStatus spaceStatus = ctx.interiorFrame.hasSpaceInsert(tuple, cmp);
@@ -696,7 +709,7 @@
case INSUFFICIENT_SPACE: {
splitsByLevel[ctx.interiorFrame.getLevel()]++; // debug
int rightPageId = getFreePage(ctx.metaFrame);
- ICachedPage rightNode = bufferCache.pin(FileInfo.getDiskPageId(fileId, rightPageId), true);
+ ICachedPage rightNode = bufferCache.pin(FileHandle.getDiskPageId(fileId, rightPageId), true);
pins++;
rightNode.acquireWriteLatch();
writeLatchesAcquired++;
@@ -754,16 +767,16 @@
}
}
-
- public void delete(ITupleReference tuple, BTreeOpContext ctx) throws Exception {
- ctx.reset();
- ctx.pred.setLowKeyComparator(cmp);
- ctx.pred.setHighKeyComparator(cmp);
+
+ public void delete(ITupleReference tuple, BTreeOpContext ctx) throws Exception {
+ ctx.reset();
+ ctx.pred.setLowKeyComparator(cmp);
+ ctx.pred.setHighKeyComparator(cmp);
ctx.pred.setLowKey(tuple, true);
- ctx.pred.setHighKey(tuple, true);
+ ctx.pred.setHighKey(tuple, true);
ctx.splitKey.reset();
- ctx.splitKey.getTuple().setFieldCount(cmp.getKeyFieldCount());
-
+ ctx.splitKey.getTuple().setFieldCount(cmp.getKeyFieldCount());
+
boolean repeatOp = true;
// we use this loop to deal with possibly multiple operation restarts
// due to ongoing structure modifications during the descent
@@ -779,7 +792,7 @@
// tree is empty, reset level to zero
if (ctx.splitKey.getBuffer() != null) {
- ICachedPage rootNode = bufferCache.pin(FileInfo.getDiskPageId(fileId, rootPage), false);
+ ICachedPage rootNode = bufferCache.pin(FileHandle.getDiskPageId(fileId, rootPage), false);
pins++;
rootNode.acquireWriteLatch();
writeLatchesAcquired++;
@@ -806,30 +819,30 @@
// TODO: to avoid latch deadlock, must modify cursor to detect empty leaves
private void deleteLeaf(ICachedPage node, int pageId, ITupleReference tuple, BTreeOpContext ctx) throws Exception {
ctx.leafFrame.setPage(node);
-
+
// will this leaf become empty?
if (ctx.leafFrame.getTupleCount() == 1) {
IBTreeLeafFrame siblingFrame = leafFrameFactory.getFrame();
-
+
ICachedPage leftNode = null;
ICachedPage rightNode = null;
int nextLeaf = ctx.leafFrame.getNextLeaf();
int prevLeaf = ctx.leafFrame.getPrevLeaf();
if (prevLeaf > 0)
- leftNode = bufferCache.pin(FileInfo.getDiskPageId(fileId, prevLeaf), false);
+ leftNode = bufferCache.pin(FileHandle.getDiskPageId(fileId, prevLeaf), false);
try {
if (nextLeaf > 0)
- rightNode = bufferCache.pin(FileInfo.getDiskPageId(fileId, nextLeaf), false);
+ rightNode = bufferCache.pin(FileHandle.getDiskPageId(fileId, nextLeaf), false);
try {
treeLatch.writeLock().lock();
treeLatchesAcquired++;
try {
- ctx.leafFrame.delete(tuple, cmp, true);
+ ctx.leafFrame.delete(tuple, cmp, true);
// to propagate the deletion we only need to make the
// splitKey != null
// we can reuse data to identify which key to delete in
@@ -912,7 +925,8 @@
}
}
- private void deleteInterior(ICachedPage node, int pageId, ITupleReference tuple, BTreeOpContext ctx) throws Exception {
+ private void deleteInterior(ICachedPage node, int pageId, ITupleReference tuple, BTreeOpContext ctx)
+ throws Exception {
ctx.interiorFrame.setPage(node);
// this means there is only a child pointer but no key, this case
@@ -962,7 +976,7 @@
}
private boolean isConsistent(int pageId, BTreeOpContext ctx) throws Exception {
- ICachedPage node = bufferCache.pin(FileInfo.getDiskPageId(fileId, pageId), false);
+ ICachedPage node = bufferCache.pin(FileHandle.getDiskPageId(fileId, pageId), false);
pins++;
node.acquireReadLatch();
readLatchesAcquired++;
@@ -980,7 +994,7 @@
}
private void performOp(int pageId, ICachedPage parent, BTreeOpContext ctx) throws Exception {
- ICachedPage node = bufferCache.pin(FileInfo.getDiskPageId(fileId, pageId), false);
+ ICachedPage node = bufferCache.pin(FileHandle.getDiskPageId(fileId, pageId), false);
pins++;
ctx.interiorFrame.setPage(node);
@@ -991,7 +1005,7 @@
// remember trail of pageLsns, to unwind recursion in case of an ongoing
// structure modification
ctx.pageLsns.add(ctx.interiorFrame.getPageLsn());
-
+
try {
// latch coupling, note: parent should never be write latched,
@@ -1014,14 +1028,16 @@
performOp(childPageId, node, ctx);
if (!ctx.pageLsns.isEmpty() && ctx.pageLsns.getLast() == RESTART_OP) {
- ctx.pageLsns.removeLast(); // pop the restart op indicator
+ ctx.pageLsns.removeLast(); // pop the restart op
+ // indicator
if (isConsistent(pageId, ctx)) {
node = null; // to avoid unpinning and
// unlatching node again in
// recursive call
continue; // descend the tree again
} else {
- ctx.pageLsns.removeLast(); // pop pageLsn of this page
+ ctx.pageLsns.removeLast(); // pop pageLsn of
+ // this page
// (version seen by this op
// during descent)
ctx.pageLsns.add(RESTART_OP); // this node is
@@ -1039,7 +1055,7 @@
case BTO_INSERT: {
if (ctx.splitKey.getBuffer() != null) {
- node = bufferCache.pin(FileInfo.getDiskPageId(fileId, pageId), false);
+ node = bufferCache.pin(FileHandle.getDiskPageId(fileId, pageId), false);
pins++;
node.acquireWriteLatch();
writeLatchesAcquired++;
@@ -1058,8 +1074,8 @@
break;
case BTO_DELETE: {
- if (ctx.splitKey.getBuffer() != null) {
- node = bufferCache.pin(FileInfo.getDiskPageId(fileId, pageId), false);
+ if (ctx.splitKey.getBuffer() != null) {
+ node = bufferCache.pin(FileHandle.getDiskPageId(fileId, pageId), false);
pins++;
node.acquireWriteLatch();
writeLatchesAcquired++;
@@ -1089,11 +1105,12 @@
} // end while
} else { // smFlag
ctx.opRestarts++;
- System.out.println("ONGOING SM ON PAGE " + pageId + " AT LEVEL " + ctx.interiorFrame.getLevel() + ", RESTARTS: " + ctx.opRestarts);
+ System.out.println("ONGOING SM ON PAGE " + pageId + " AT LEVEL " + ctx.interiorFrame.getLevel()
+ + ", RESTARTS: " + ctx.opRestarts);
releaseLatch(node, ctx.op, isLeaf);
bufferCache.unpin(node);
unpins++;
-
+
// TODO: this should be an instant duration lock, how to do
// this in java?
// instead we just immediately release the lock. this is
@@ -1111,23 +1128,26 @@
}
} else { // isLeaf and !smFlag
switch (ctx.op) {
- case BTO_INSERT: {
- insertLeaf(node, pageId, ctx.pred.getLowKey(), ctx);
- } break;
+ case BTO_INSERT: {
+ insertLeaf(node, pageId, ctx.pred.getLowKey(), ctx);
+ }
+ break;
case BTO_DELETE: {
deleteLeaf(node, pageId, ctx.pred.getLowKey(), ctx);
- } break;
+ }
+ break;
case BTO_SEARCH: {
ctx.cursor.open(node, ctx.pred);
- } break;
+ }
+ break;
}
}
} catch (BTreeException e) {
- //System.out.println("BTREE EXCEPTION");
- //System.out.println(e.getMessage());
- //e.printStackTrace();
+ // System.out.println("BTREE EXCEPTION");
+ // System.out.println(e.getMessage());
+ // e.printStackTrace();
if (!e.getHandled()) {
releaseLatch(node, ctx.op, isLeaf);
bufferCache.unpin(node);
@@ -1150,41 +1170,41 @@
throw propException;
}
}
-
+
private boolean bulkNewPage = false;
- public final class BulkLoadContext {
+ public final class BulkLoadContext {
public final int slotSize;
public final int leafMaxBytes;
public final int interiorMaxBytes;
public final SplitKey splitKey;
// we maintain a frontier of nodes for each level
- private final ArrayList<NodeFrontier> nodeFrontiers = new ArrayList<NodeFrontier>();
+ private final ArrayList<NodeFrontier> nodeFrontiers = new ArrayList<NodeFrontier>();
private final IBTreeLeafFrame leafFrame;
private final IBTreeInteriorFrame interiorFrame;
private final IBTreeMetaDataFrame metaFrame;
-
- private final IBTreeTupleWriter tupleWriter;
-
- public BulkLoadContext(float fillFactor, IBTreeLeafFrame leafFrame, IBTreeInteriorFrame interiorFrame,
- IBTreeMetaDataFrame metaFrame) throws Exception {
- splitKey = new SplitKey(leafFrame.getTupleWriter().createTupleReference());
- tupleWriter = leafFrame.getTupleWriter();
-
- NodeFrontier leafFrontier = new NodeFrontier(leafFrame.createTupleReference());
+ private final IBTreeTupleWriter tupleWriter;
+
+ public BulkLoadContext(float fillFactor, IBTreeLeafFrame leafFrame, IBTreeInteriorFrame interiorFrame,
+ IBTreeMetaDataFrame metaFrame) throws HyracksDataException {
+
+ splitKey = new SplitKey(leafFrame.getTupleWriter().createTupleReference());
+ tupleWriter = leafFrame.getTupleWriter();
+
+ NodeFrontier leafFrontier = new NodeFrontier(leafFrame.createTupleReference());
leafFrontier.pageId = getFreePage(metaFrame);
- leafFrontier.page = bufferCache.pin(FileInfo.getDiskPageId(fileId, leafFrontier.pageId), bulkNewPage);
+ leafFrontier.page = bufferCache.pin(FileHandle.getDiskPageId(fileId, leafFrontier.pageId), bulkNewPage);
leafFrontier.page.acquireWriteLatch();
interiorFrame.setPage(leafFrontier.page);
interiorFrame.initBuffer((byte) 0);
interiorMaxBytes = (int) ((float) interiorFrame.getBuffer().capacity() * fillFactor);
-
+
leafFrame.setPage(leafFrontier.page);
leafFrame.initBuffer((byte) 0);
leafMaxBytes = (int) ((float) leafFrame.getBuffer().capacity() * fillFactor);
-
+
slotSize = leafFrame.getSlotSize();
this.leafFrame = leafFrame;
@@ -1194,10 +1214,10 @@
nodeFrontiers.add(leafFrontier);
}
- private void addLevel() throws Exception {
+ private void addLevel() throws HyracksDataException {
NodeFrontier frontier = new NodeFrontier(tupleWriter.createTupleReference());
frontier.pageId = getFreePage(metaFrame);
- frontier.page = bufferCache.pin(FileInfo.getDiskPageId(fileId, frontier.pageId), bulkNewPage);
+ frontier.page = bufferCache.pin(FileHandle.getDiskPageId(fileId, frontier.pageId), bulkNewPage);
frontier.page.acquireWriteLatch();
frontier.lastTuple.setFieldCount(cmp.getKeyFieldCount());
interiorFrame.setPage(frontier.page);
@@ -1206,119 +1226,129 @@
}
}
- private void propagateBulk(BulkLoadContext ctx, int level) throws Exception {
+ private void propagateBulk(BulkLoadContext ctx, int level) throws HyracksDataException {
if (ctx.splitKey.getBuffer() == null)
return;
-
+
if (level >= ctx.nodeFrontiers.size())
ctx.addLevel();
-
+
NodeFrontier frontier = ctx.nodeFrontiers.get(level);
ctx.interiorFrame.setPage(frontier.page);
-
- ITupleReference tuple = ctx.splitKey.getTuple();
- int spaceNeeded = ctx.tupleWriter.bytesRequired(tuple, 0, cmp.getKeyFieldCount()) + ctx.slotSize + 4;
+
+ ITupleReference tuple = ctx.splitKey.getTuple();
+ int spaceNeeded = ctx.tupleWriter.bytesRequired(tuple, 0, cmp.getKeyFieldCount()) + ctx.slotSize + 4;
int spaceUsed = ctx.interiorFrame.getBuffer().capacity() - ctx.interiorFrame.getTotalFreeSpace();
if (spaceUsed + spaceNeeded > ctx.interiorMaxBytes) {
-
- SplitKey copyKey = ctx.splitKey.duplicate(ctx.leafFrame.getTupleWriter().createTupleReference());
- tuple = copyKey.getTuple();
-
- frontier.lastTuple.resetByOffset(frontier.page.getBuffer(), ctx.interiorFrame.getTupleOffset(ctx.interiorFrame.getTupleCount()-1));
+
+ SplitKey copyKey = ctx.splitKey.duplicate(ctx.leafFrame.getTupleWriter().createTupleReference());
+ tuple = copyKey.getTuple();
+
+ frontier.lastTuple.resetByOffset(frontier.page.getBuffer(), ctx.interiorFrame
+ .getTupleOffset(ctx.interiorFrame.getTupleCount() - 1));
int splitKeySize = ctx.tupleWriter.bytesRequired(frontier.lastTuple, 0, cmp.getKeyFieldCount());
ctx.splitKey.initData(splitKeySize);
- ctx.tupleWriter.writeTupleFields(frontier.lastTuple, 0, cmp.getKeyFieldCount(), ctx.splitKey.getBuffer(), 0);
+ ctx.tupleWriter
+ .writeTupleFields(frontier.lastTuple, 0, cmp.getKeyFieldCount(), ctx.splitKey.getBuffer(), 0);
ctx.splitKey.getTuple().resetByOffset(ctx.splitKey.getBuffer(), 0);
ctx.splitKey.setLeftPage(frontier.pageId);
-
+
ctx.interiorFrame.deleteGreatest(cmp);
-
+
frontier.page.releaseWriteLatch();
bufferCache.unpin(frontier.page);
frontier.pageId = getFreePage(ctx.metaFrame);
-
+
ctx.splitKey.setRightPage(frontier.pageId);
propagateBulk(ctx, level + 1);
- frontier.page = bufferCache.pin(FileInfo.getDiskPageId(fileId, frontier.pageId), bulkNewPage);
+ frontier.page = bufferCache.pin(FileHandle.getDiskPageId(fileId, frontier.pageId), bulkNewPage);
frontier.page.acquireWriteLatch();
ctx.interiorFrame.setPage(frontier.page);
ctx.interiorFrame.initBuffer((byte) level);
}
ctx.interiorFrame.insertSorted(tuple, cmp);
-
+
// debug print
- //ISerializerDeserializer[] btreeSerde = { UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
- //String s = ctx.interiorFrame.printKeys(cmp, btreeSerde);
- //System.out.println(s);
+ // ISerializerDeserializer[] btreeSerde = {
+ // UTF8StringSerializerDeserializer.INSTANCE,
+ // IntegerSerializerDeserializer.INSTANCE };
+ // String s = ctx.interiorFrame.printKeys(cmp, btreeSerde);
+ // System.out.println(s);
}
-
+
// assumes btree has been created and opened
- public BulkLoadContext beginBulkLoad(float fillFactor, IBTreeLeafFrame leafFrame, IBTreeInteriorFrame interiorFrame, IBTreeMetaDataFrame metaFrame) throws Exception {
-
- if(loaded) throw new BTreeException("Trying to bulk-load BTree but has BTree already been loaded.");
-
- BulkLoadContext ctx = new BulkLoadContext(fillFactor, leafFrame, interiorFrame, metaFrame);
+ public BulkLoadContext beginBulkLoad(float fillFactor, IBTreeLeafFrame leafFrame,
+ IBTreeInteriorFrame interiorFrame, IBTreeMetaDataFrame metaFrame) throws HyracksDataException {
+
+ if (loaded)
+ throw new HyracksDataException("Trying to bulk-load BTree but has BTree already been loaded.");
+
+ BulkLoadContext ctx = new BulkLoadContext(fillFactor, leafFrame, interiorFrame, metaFrame);
ctx.nodeFrontiers.get(0).lastTuple.setFieldCount(cmp.getFieldCount());
ctx.splitKey.getTuple().setFieldCount(cmp.getKeyFieldCount());
return ctx;
}
-
- public void bulkLoadAddTuple(BulkLoadContext ctx, ITupleReference tuple) throws Exception {
+
+ public void bulkLoadAddTuple(BulkLoadContext ctx, ITupleReference tuple) throws HyracksDataException {
NodeFrontier leafFrontier = ctx.nodeFrontiers.get(0);
IBTreeLeafFrame leafFrame = ctx.leafFrame;
-
- int spaceNeeded = ctx.tupleWriter.bytesRequired(tuple) + ctx.slotSize;
+
+ int spaceNeeded = ctx.tupleWriter.bytesRequired(tuple) + ctx.slotSize;
int spaceUsed = leafFrame.getBuffer().capacity() - leafFrame.getTotalFreeSpace();
-
+
// try to free space by compression
- if (spaceUsed + spaceNeeded > ctx.leafMaxBytes) {
- leafFrame.compress(cmp);
- spaceUsed = leafFrame.getBuffer().capacity() - leafFrame.getTotalFreeSpace();
+ if (spaceUsed + spaceNeeded > ctx.leafMaxBytes) {
+ leafFrame.compress(cmp);
+ spaceUsed = leafFrame.getBuffer().capacity() - leafFrame.getTotalFreeSpace();
}
-
- if (spaceUsed + spaceNeeded > ctx.leafMaxBytes) {
- leafFrontier.lastTuple.resetByTupleIndex(leafFrame, leafFrame.getTupleCount()-1);
- int splitKeySize = ctx.tupleWriter.bytesRequired(leafFrontier.lastTuple, 0, cmp.getKeyFieldCount());
+
+ if (spaceUsed + spaceNeeded > ctx.leafMaxBytes) {
+ leafFrontier.lastTuple.resetByTupleIndex(leafFrame, leafFrame.getTupleCount() - 1);
+ int splitKeySize = ctx.tupleWriter.bytesRequired(leafFrontier.lastTuple, 0, cmp.getKeyFieldCount());
ctx.splitKey.initData(splitKeySize);
- ctx.tupleWriter.writeTupleFields(leafFrontier.lastTuple, 0, cmp.getKeyFieldCount(), ctx.splitKey.getBuffer(), 0);
+ ctx.tupleWriter.writeTupleFields(leafFrontier.lastTuple, 0, cmp.getKeyFieldCount(), ctx.splitKey
+ .getBuffer(), 0);
ctx.splitKey.getTuple().resetByOffset(ctx.splitKey.getBuffer(), 0);
ctx.splitKey.setLeftPage(leafFrontier.pageId);
int prevPageId = leafFrontier.pageId;
leafFrontier.pageId = getFreePage(ctx.metaFrame);
-
+
leafFrame.setNextLeaf(leafFrontier.pageId);
leafFrontier.page.releaseWriteLatch();
bufferCache.unpin(leafFrontier.page);
-
+
ctx.splitKey.setRightPage(leafFrontier.pageId);
propagateBulk(ctx, 1);
-
- leafFrontier.page = bufferCache.pin(FileInfo.getDiskPageId(fileId, leafFrontier.pageId), bulkNewPage);
+
+ leafFrontier.page = bufferCache.pin(FileHandle.getDiskPageId(fileId, leafFrontier.pageId), bulkNewPage);
leafFrontier.page.acquireWriteLatch();
leafFrame.setPage(leafFrontier.page);
leafFrame.initBuffer((byte) 0);
leafFrame.setPrevLeaf(prevPageId);
}
-
+
leafFrame.setPage(leafFrontier.page);
leafFrame.insertSorted(tuple, cmp);
-
+
// debug print
- //ISerializerDeserializer[] btreeSerde = { UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
- //String s = leafFrame.printKeys(cmp, btreeSerde);
- //System.out.println(s);
+ // ISerializerDeserializer[] btreeSerde = {
+ // UTF8StringSerializerDeserializer.INSTANCE,
+ // IntegerSerializerDeserializer.INSTANCE };
+ // String s = leafFrame.printKeys(cmp, btreeSerde);
+ // System.out.println(s);
}
-
- public void endBulkLoad(BulkLoadContext ctx) throws Exception {
+
+ public void endBulkLoad(BulkLoadContext ctx) throws HyracksDataException {
// copy root
- ICachedPage rootNode = bufferCache.pin(FileInfo.getDiskPageId(fileId, rootPage), bulkNewPage);
+ ICachedPage rootNode = bufferCache.pin(FileHandle.getDiskPageId(fileId, rootPage), bulkNewPage);
rootNode.acquireWriteLatch();
- try {
+ try {
ICachedPage toBeRoot = ctx.nodeFrontiers.get(ctx.nodeFrontiers.size() - 1).page;
- System.arraycopy(toBeRoot.getBuffer().array(), 0, rootNode.getBuffer().array(), 0, toBeRoot.getBuffer().capacity());
- } finally {
+ System.arraycopy(toBeRoot.getBuffer().array(), 0, rootNode.getBuffer().array(), 0, toBeRoot.getBuffer()
+ .capacity());
+ } finally {
rootNode.releaseWriteLatch();
bufferCache.unpin(rootNode);
@@ -1330,16 +1360,16 @@
}
// debug
currentLevel = (byte) ctx.nodeFrontiers.size();
-
+
loaded = true;
}
-
+
public BTreeOpContext createOpContext(BTreeOp op, IBTreeLeafFrame leafFrame, IBTreeInteriorFrame interiorFrame,
- IBTreeMetaDataFrame metaFrame) {
- // TODO: figure out better tree-height hint
- return new BTreeOpContext(op, leafFrame, interiorFrame, metaFrame, 6);
+ IBTreeMetaDataFrame metaFrame) {
+ // TODO: figure out better tree-height hint
+ return new BTreeOpContext(op, leafFrame, interiorFrame, metaFrame, 6);
}
-
+
public IBTreeInteriorFrameFactory getInteriorFrameFactory() {
return interiorFrameFactory;
}
@@ -1347,7 +1377,7 @@
public IBTreeLeafFrameFactory getLeafFrameFactory() {
return leafFrameFactory;
}
-
+
public MultiComparator getMultiComparator() {
return cmp;
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeException.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeException.java
index e5a5393..7ec61ed 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeException.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeException.java
@@ -16,23 +16,23 @@
package edu.uci.ics.hyracks.storage.am.btree.impls;
public class BTreeException extends Exception {
-
- private static final long serialVersionUID = 1L;
- private boolean handled = false;
-
- public BTreeException(Exception e) {
+
+ private static final long serialVersionUID = 1L;
+ private boolean handled = false;
+
+ public BTreeException(Exception e) {
super(e);
}
-
- public BTreeException(String message) {
+
+ public BTreeException(String message) {
super(message);
}
-
- public void setHandled(boolean handled) {
- this.handled = handled;
- }
-
- public boolean getHandled() {
- return handled;
- }
+
+ public void setHandled(boolean handled) {
+ this.handled = handled;
+ }
+
+ public boolean getHandled() {
+ return handled;
+ }
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOp.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOp.java
index 4c0ef81..d698c8a 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOp.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOp.java
@@ -16,8 +16,5 @@
package edu.uci.ics.hyracks.storage.am.btree.impls;
public enum BTreeOp {
- BTO_INSERT,
- BTO_DELETE,
- BTO_UPDATE,
- BTO_SEARCH
+ BTO_INSERT, BTO_DELETE, BTO_UPDATE, BTO_SEARCH
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOpContext.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOpContext.java
index f15ded3..84c11a2 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOpContext.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOpContext.java
@@ -21,43 +21,45 @@
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeMetaDataFrame;
public final class BTreeOpContext {
- public final BTreeOp op;
- public final IBTreeLeafFrame leafFrame;
- public final IBTreeInteriorFrame interiorFrame;
- public final IBTreeMetaDataFrame metaFrame;
- public IBTreeCursor cursor;
- public RangePredicate pred;
- public final SplitKey splitKey;
- public int opRestarts = 0;
- public final IntArrayList pageLsns; // used like a stack
- public final IntArrayList smPages;
- public final IntArrayList freePages;
+ public final BTreeOp op;
+ public final IBTreeLeafFrame leafFrame;
+ public final IBTreeInteriorFrame interiorFrame;
+ public final IBTreeMetaDataFrame metaFrame;
+ public IBTreeCursor cursor;
+ public RangePredicate pred;
+ public final SplitKey splitKey;
+ public int opRestarts = 0;
+ public final IntArrayList pageLsns; // used like a stack
+ public final IntArrayList smPages;
+ public final IntArrayList freePages;
- public BTreeOpContext(BTreeOp op, IBTreeLeafFrame leafFrame, IBTreeInteriorFrame interiorFrame,
- IBTreeMetaDataFrame metaFrame, int treeHeightHint) {
- this.op = op;
- this.leafFrame = leafFrame;
- this.interiorFrame = interiorFrame;
- this.metaFrame = metaFrame;
-
- pageLsns = new IntArrayList(treeHeightHint, treeHeightHint);
- if(op != BTreeOp.BTO_SEARCH) {
- smPages = new IntArrayList(treeHeightHint, treeHeightHint);
- freePages = new IntArrayList(treeHeightHint, treeHeightHint);
- pred = new RangePredicate(true, null, null, true, true, null, null);
- splitKey = new SplitKey(leafFrame.getTupleWriter().createTupleReference());
- }
- else {
- smPages = null;
- freePages = null;
- splitKey = null;
- }
- }
-
- public void reset() {
- if(pageLsns != null) pageLsns.clear();
- if(freePages != null) freePages.clear();
- if(smPages != null) smPages.clear();
- opRestarts = 0;
- }
+ public BTreeOpContext(BTreeOp op, IBTreeLeafFrame leafFrame, IBTreeInteriorFrame interiorFrame,
+ IBTreeMetaDataFrame metaFrame, int treeHeightHint) {
+ this.op = op;
+ this.leafFrame = leafFrame;
+ this.interiorFrame = interiorFrame;
+ this.metaFrame = metaFrame;
+
+ pageLsns = new IntArrayList(treeHeightHint, treeHeightHint);
+ if (op != BTreeOp.BTO_SEARCH) {
+ smPages = new IntArrayList(treeHeightHint, treeHeightHint);
+ freePages = new IntArrayList(treeHeightHint, treeHeightHint);
+ pred = new RangePredicate(true, null, null, true, true, null, null);
+ splitKey = new SplitKey(leafFrame.getTupleWriter().createTupleReference());
+ } else {
+ smPages = null;
+ freePages = null;
+ splitKey = null;
+ }
+ }
+
+ public void reset() {
+ if (pageLsns != null)
+ pageLsns.clear();
+ if (freePages != null)
+ freePages.clear();
+ if (smPages != null)
+ smPages.clear();
+ opRestarts = 0;
+ }
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/DiskOrderScanCursor.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/DiskOrderScanCursor.java
index 8be8b9c..e213023 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/DiskOrderScanCursor.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/DiskOrderScanCursor.java
@@ -15,133 +15,138 @@
package edu.uci.ics.hyracks.storage.am.btree.impls;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeCursor;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleReference;
import edu.uci.ics.hyracks.storage.am.btree.api.ISearchPredicate;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-import edu.uci.ics.hyracks.storage.common.file.FileInfo;
+import edu.uci.ics.hyracks.storage.common.file.FileHandle;
public class DiskOrderScanCursor implements IBTreeCursor {
-
- // TODO: might want to return tuples in physical order, not logical order to speed up access
-
+
+ // TODO: might want to return tuples in physical order, not logical order to
+ // speed up access
+
private int tupleIndex = 0;
- private int fileId = -1;
+ private int fileId = -1;
int currentPageId = -1;
- int maxPageId = -1; // TODO: figure out how to scan to the end of file, this is dirty and may not with concurrent updates
+ int maxPageId = -1; // TODO: figure out how to scan to the end of file, this
+ // is dirty and may not work with concurrent updates
private ICachedPage page = null;
private IBTreeLeafFrame frame = null;
private IBufferCache bufferCache = null;
-
+
private IBTreeTupleReference frameTuple;
-
+
public DiskOrderScanCursor(IBTreeLeafFrame frame) {
- this.frame = frame;
- this.frameTuple = frame.getTupleWriter().createTupleReference();
+ this.frame = frame;
+ this.frameTuple = frame.getTupleWriter().createTupleReference();
}
-
+
@Override
public void close() throws Exception {
page.releaseReadLatch();
bufferCache.unpin(page);
page = null;
}
-
+
@Override
- public IBTreeTupleReference getTuple() {
- return frameTuple;
- }
-
+ public IBTreeTupleReference getTuple() {
+ return frameTuple;
+ }
+
@Override
public ICachedPage getPage() {
return page;
}
-
- private boolean positionToNextLeaf(boolean skipCurrent) throws Exception {
- while( (frame.getLevel() != 0 || skipCurrent) && (currentPageId <= maxPageId) || (frame.getTupleCount() == 0) ) {
+
+ private boolean positionToNextLeaf(boolean skipCurrent) throws HyracksDataException {
+ while ((frame.getLevel() != 0 || skipCurrent) && (currentPageId <= maxPageId) || (frame.getTupleCount() == 0)) {
currentPageId++;
-
- ICachedPage nextPage = bufferCache.pin(FileInfo.getDiskPageId(fileId, currentPageId), false);
+
+ ICachedPage nextPage = bufferCache.pin(FileHandle.getDiskPageId(fileId, currentPageId), false);
nextPage.acquireReadLatch();
-
+
page.releaseReadLatch();
bufferCache.unpin(page);
-
+
page = nextPage;
frame.setPage(page);
tupleIndex = 0;
skipCurrent = false;
- }
- if(currentPageId <= maxPageId) return true;
- else return false;
+ }
+ if (currentPageId <= maxPageId)
+ return true;
+ else
+ return false;
}
-
+
@Override
- public boolean hasNext() throws Exception {
- if(tupleIndex >= frame.getTupleCount()) {
+ public boolean hasNext() throws Exception {
+ if (tupleIndex >= frame.getTupleCount()) {
boolean nextLeafExists = positionToNextLeaf(true);
- if(nextLeafExists) {
- frameTuple.resetByTupleIndex(frame, tupleIndex);
+ if (nextLeafExists) {
+ frameTuple.resetByTupleIndex(frame, tupleIndex);
return true;
+ } else {
+ return false;
}
- else {
- return false;
- }
- }
-
- frameTuple.resetByTupleIndex(frame, tupleIndex);
+ }
+
+ frameTuple.resetByTupleIndex(frame, tupleIndex);
return true;
}
@Override
- public void next() throws Exception {
- tupleIndex++;
+ public void next() throws Exception {
+ tupleIndex++;
}
-
+
@Override
- public void open(ICachedPage page, ISearchPredicate searchPred) throws Exception {
+ public void open(ICachedPage page, ISearchPredicate searchPred) throws HyracksDataException {
// in case open is called multiple times without closing
- if(this.page != null) {
+ if (this.page != null) {
this.page.releaseReadLatch();
bufferCache.unpin(this.page);
}
-
+
this.page = page;
tupleIndex = 0;
frame.setPage(page);
- RangePredicate pred = (RangePredicate)searchPred;
- MultiComparator lowKeyCmp = pred.getLowKeyComparator();
- frameTuple.setFieldCount(lowKeyCmp.getFieldCount());
+ RangePredicate pred = (RangePredicate) searchPred;
+ MultiComparator lowKeyCmp = pred.getLowKeyComparator();
+ frameTuple.setFieldCount(lowKeyCmp.getFieldCount());
boolean leafExists = positionToNextLeaf(false);
- if(!leafExists) {
- throw new Exception("Failed to open disk-order scan cursor for B-tree. Traget B-tree has no leaves.");
+ if (!leafExists) {
+ throw new HyracksDataException(
+ "Failed to open disk-order scan cursor for B-tree. Target B-tree has no leaves.");
}
}
-
+
@Override
public void reset() {
tupleIndex = 0;
currentPageId = -1;
maxPageId = -1;
- page = null;
+ page = null;
}
@Override
public void setBufferCache(IBufferCache bufferCache) {
- this.bufferCache = bufferCache;
+ this.bufferCache = bufferCache;
}
@Override
public void setFileId(int fileId) {
this.fileId = fileId;
- }
-
+ }
+
public void setCurrentPageId(int currentPageId) {
this.currentPageId = currentPageId;
}
-
+
public void setMaxPageId(int maxPageId) {
this.maxPageId = maxPageId;
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixPrefixTupleReference.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixPrefixTupleReference.java
index 320025c..8711a17 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixPrefixTupleReference.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixPrefixTupleReference.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.btree.impls;
import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
@@ -6,20 +21,20 @@
import edu.uci.ics.hyracks.storage.am.btree.tuples.TypeAwareTupleReference;
public class FieldPrefixPrefixTupleReference extends TypeAwareTupleReference {
-
- public FieldPrefixPrefixTupleReference(ITypeTrait[] typeTraits) {
- super(typeTraits);
- }
-
- // assumes tuple index refers to prefix tuples
- @Override
- public void resetByTupleIndex(IBTreeFrame frame, int tupleIndex) {
- FieldPrefixNSMLeafFrame concreteFrame = (FieldPrefixNSMLeafFrame)frame;
- int prefixSlotOff = concreteFrame.slotManager.getPrefixSlotOff(tupleIndex);
- int prefixSlot = concreteFrame.getBuffer().getInt(prefixSlotOff);
- setFieldCount(concreteFrame.slotManager.decodeFirstSlotField(prefixSlot));
- tupleStartOff = concreteFrame.slotManager.decodeSecondSlotField(prefixSlot);
- buf = concreteFrame.getBuffer();
- resetByOffset(buf, tupleStartOff);
- }
+
+ public FieldPrefixPrefixTupleReference(ITypeTrait[] typeTraits) {
+ super(typeTraits);
+ }
+
+ // assumes tuple index refers to prefix tuples
+ @Override
+ public void resetByTupleIndex(IBTreeFrame frame, int tupleIndex) {
+ FieldPrefixNSMLeafFrame concreteFrame = (FieldPrefixNSMLeafFrame) frame;
+ int prefixSlotOff = concreteFrame.slotManager.getPrefixSlotOff(tupleIndex);
+ int prefixSlot = concreteFrame.getBuffer().getInt(prefixSlotOff);
+ setFieldCount(concreteFrame.slotManager.decodeFirstSlotField(prefixSlot));
+ tupleStartOff = concreteFrame.slotManager.decodeSecondSlotField(prefixSlot);
+ buf = concreteFrame.getBuffer();
+ resetByOffset(buf, tupleStartOff);
+ }
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixSlotManager.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixSlotManager.java
index 44dde87..83a2d92 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixSlotManager.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixSlotManager.java
@@ -23,209 +23,229 @@
import edu.uci.ics.hyracks.storage.am.btree.frames.FieldPrefixNSMLeafFrame;
public class FieldPrefixSlotManager implements IPrefixSlotManager {
-
- private static final int slotSize = 4;
- public static final int TUPLE_UNCOMPRESSED = 0xFF;
- public static final int MAX_PREFIX_SLOTS = 0xFE;
- public static final int GREATEST_SLOT = 0x00FFFFFF;
-
- private ByteBuffer buf;
- private FieldPrefixNSMLeafFrame frame;
-
- public int decodeFirstSlotField(int slot) {
- return (slot & 0xFF000000) >>> 24;
- }
-
- public int decodeSecondSlotField(int slot) {
- return slot & 0x00FFFFFF;
- }
-
- public int encodeSlotFields(int firstField, int secondField) {
- return ((firstField & 0x000000FF) << 24) | (secondField & 0x00FFFFFF);
- }
-
- // returns prefix slot number, or TUPLE_UNCOMPRESSED of no match was found
- public int findPrefix(ITupleReference tuple, IBTreeTupleReference framePrefixTuple, MultiComparator multiCmp) {
- int prefixMid;
- int prefixBegin = 0;
- int prefixEnd = frame.getPrefixTupleCount() - 1;
-
- if(frame.getPrefixTupleCount() > 0) {
- while(prefixBegin <= prefixEnd) {
- prefixMid = (prefixBegin + prefixEnd) / 2;
- framePrefixTuple.resetByTupleIndex(frame, prefixMid);
- int cmp = multiCmp.fieldRangeCompare(tuple, framePrefixTuple, 0, framePrefixTuple.getFieldCount());
- if(cmp < 0) prefixEnd = prefixMid - 1;
- else if(cmp > 0) prefixBegin = prefixMid + 1;
- else return prefixMid;
- }
- }
-
- return FieldPrefixSlotManager.TUPLE_UNCOMPRESSED;
- }
-
- @Override
- public int findSlot(ITupleReference searchKey, IBTreeTupleReference frameTuple, IBTreeTupleReference framePrefixTuple, MultiComparator multiCmp, FindTupleMode mode, FindTupleNoExactMatchPolicy matchPolicy) {
- if(frame.getTupleCount() <= 0) encodeSlotFields(TUPLE_UNCOMPRESSED, GREATEST_SLOT);
-
- frameTuple.setFieldCount(multiCmp.getFieldCount());
-
- int prefixMid;
- int prefixBegin = 0;
- int prefixEnd = frame.getPrefixTupleCount() - 1;
- int prefixMatch = TUPLE_UNCOMPRESSED;
-
- // bounds are inclusive on both ends
- int tuplePrefixSlotNumLbound = prefixBegin;
- int tuplePrefixSlotNumUbound = prefixEnd;
-
- // binary search on the prefix slots to determine upper and lower bounds for the prefixSlotNums in tuple slots
- while(prefixBegin <= prefixEnd) {
- prefixMid = (prefixBegin + prefixEnd) / 2;
- framePrefixTuple.resetByTupleIndex(frame, prefixMid);
- int cmp = multiCmp.fieldRangeCompare(searchKey, framePrefixTuple, 0, framePrefixTuple.getFieldCount());
- if(cmp < 0) {
- prefixEnd = prefixMid - 1;
- tuplePrefixSlotNumLbound = prefixMid - 1;
- }
- else if(cmp > 0) {
- prefixBegin = prefixMid + 1;
- tuplePrefixSlotNumUbound = prefixMid + 1;
- }
- else {
- if(mode == FindTupleMode.FTM_EXCLUSIVE) {
- if(matchPolicy == FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY) prefixBegin = prefixMid + 1;
- else prefixEnd = prefixMid - 1;
- }
- else {
- tuplePrefixSlotNumLbound = prefixMid;
- tuplePrefixSlotNumUbound = prefixMid;
- prefixMatch = prefixMid;
- }
-
- break;
- }
- }
-
- //System.out.println("SLOTLBOUND: " + tuplePrefixSlotNumLbound);
- //System.out.println("SLOTUBOUND: " + tuplePrefixSlotNumUbound);
-
- int tupleMid = -1;
- int tupleBegin = 0;
- int tupleEnd = frame.getTupleCount() - 1;
-
- // binary search on tuples, guided by the lower and upper bounds on prefixSlotNum
- while(tupleBegin <= tupleEnd) {
- tupleMid = (tupleBegin + tupleEnd) / 2;
+
+ private static final int slotSize = 4;
+ public static final int TUPLE_UNCOMPRESSED = 0xFF;
+ public static final int MAX_PREFIX_SLOTS = 0xFE;
+ public static final int GREATEST_SLOT = 0x00FFFFFF;
+
+ private ByteBuffer buf;
+ private FieldPrefixNSMLeafFrame frame;
+
+ public int decodeFirstSlotField(int slot) {
+ return (slot & 0xFF000000) >>> 24;
+ }
+
+ public int decodeSecondSlotField(int slot) {
+ return slot & 0x00FFFFFF;
+ }
+
+ public int encodeSlotFields(int firstField, int secondField) {
+ return ((firstField & 0x000000FF) << 24) | (secondField & 0x00FFFFFF);
+ }
+
+ // returns prefix slot number, or TUPLE_UNCOMPRESSED if no match was found
+ public int findPrefix(ITupleReference tuple, IBTreeTupleReference framePrefixTuple, MultiComparator multiCmp) {
+ int prefixMid;
+ int prefixBegin = 0;
+ int prefixEnd = frame.getPrefixTupleCount() - 1;
+
+ if (frame.getPrefixTupleCount() > 0) {
+ while (prefixBegin <= prefixEnd) {
+ prefixMid = (prefixBegin + prefixEnd) / 2;
+ framePrefixTuple.resetByTupleIndex(frame, prefixMid);
+ int cmp = multiCmp.fieldRangeCompare(tuple, framePrefixTuple, 0, framePrefixTuple.getFieldCount());
+ if (cmp < 0)
+ prefixEnd = prefixMid - 1;
+ else if (cmp > 0)
+ prefixBegin = prefixMid + 1;
+ else
+ return prefixMid;
+ }
+ }
+
+ return FieldPrefixSlotManager.TUPLE_UNCOMPRESSED;
+ }
+
+ @Override
+ public int findSlot(ITupleReference searchKey, IBTreeTupleReference frameTuple,
+ IBTreeTupleReference framePrefixTuple, MultiComparator multiCmp, FindTupleMode mode,
+ FindTupleNoExactMatchPolicy matchPolicy) {
+ if (frame.getTupleCount() <= 0)
+ encodeSlotFields(TUPLE_UNCOMPRESSED, GREATEST_SLOT);
+
+ frameTuple.setFieldCount(multiCmp.getFieldCount());
+
+ int prefixMid;
+ int prefixBegin = 0;
+ int prefixEnd = frame.getPrefixTupleCount() - 1;
+ int prefixMatch = TUPLE_UNCOMPRESSED;
+
+ // bounds are inclusive on both ends
+ int tuplePrefixSlotNumLbound = prefixBegin;
+ int tuplePrefixSlotNumUbound = prefixEnd;
+
+ // binary search on the prefix slots to determine upper and lower bounds
+ // for the prefixSlotNums in tuple slots
+ while (prefixBegin <= prefixEnd) {
+ prefixMid = (prefixBegin + prefixEnd) / 2;
+ framePrefixTuple.resetByTupleIndex(frame, prefixMid);
+ int cmp = multiCmp.fieldRangeCompare(searchKey, framePrefixTuple, 0, framePrefixTuple.getFieldCount());
+ if (cmp < 0) {
+ prefixEnd = prefixMid - 1;
+ tuplePrefixSlotNumLbound = prefixMid - 1;
+ } else if (cmp > 0) {
+ prefixBegin = prefixMid + 1;
+ tuplePrefixSlotNumUbound = prefixMid + 1;
+ } else {
+ if (mode == FindTupleMode.FTM_EXCLUSIVE) {
+ if (matchPolicy == FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY)
+ prefixBegin = prefixMid + 1;
+ else
+ prefixEnd = prefixMid - 1;
+ } else {
+ tuplePrefixSlotNumLbound = prefixMid;
+ tuplePrefixSlotNumUbound = prefixMid;
+ prefixMatch = prefixMid;
+ }
+
+ break;
+ }
+ }
+
+ // System.out.println("SLOTLBOUND: " + tuplePrefixSlotNumLbound);
+ // System.out.println("SLOTUBOUND: " + tuplePrefixSlotNumUbound);
+
+ int tupleMid = -1;
+ int tupleBegin = 0;
+ int tupleEnd = frame.getTupleCount() - 1;
+
+ // binary search on tuples, guided by the lower and upper bounds on
+ // prefixSlotNum
+ while (tupleBegin <= tupleEnd) {
+ tupleMid = (tupleBegin + tupleEnd) / 2;
int tupleSlotOff = getTupleSlotOff(tupleMid);
- int tupleSlot = buf.getInt(tupleSlotOff);
+ int tupleSlot = buf.getInt(tupleSlotOff);
int prefixSlotNum = decodeFirstSlotField(tupleSlot);
-
- //System.out.println("RECS: " + recBegin + " " + recMid + " " + recEnd);
- int cmp = 0;
- if(prefixSlotNum == TUPLE_UNCOMPRESSED) {
- frameTuple.resetByTupleIndex(frame, tupleMid);
- cmp = multiCmp.compare(searchKey, frameTuple);
+
+ // System.out.println("RECS: " + recBegin + " " + recMid + " " +
+ // recEnd);
+ int cmp = 0;
+ if (prefixSlotNum == TUPLE_UNCOMPRESSED) {
+ frameTuple.resetByTupleIndex(frame, tupleMid);
+ cmp = multiCmp.compare(searchKey, frameTuple);
+ } else {
+ if (prefixSlotNum < tuplePrefixSlotNumLbound)
+ cmp = 1;
+ else if (prefixSlotNum > tuplePrefixSlotNumUbound)
+ cmp = -1;
+ else {
+ frameTuple.resetByTupleIndex(frame, tupleMid);
+ cmp = multiCmp.compare(searchKey, frameTuple);
+ }
}
- else {
- if(prefixSlotNum < tuplePrefixSlotNumLbound) cmp = 1;
- else if(prefixSlotNum > tuplePrefixSlotNumUbound) cmp = -1;
- else {
- frameTuple.resetByTupleIndex(frame, tupleMid);
- cmp = multiCmp.compare(searchKey, frameTuple);
- }
- }
-
- if(cmp < 0) tupleEnd = tupleMid - 1;
- else if(cmp > 0) tupleBegin = tupleMid + 1;
+
+ if (cmp < 0)
+ tupleEnd = tupleMid - 1;
+ else if (cmp > 0)
+ tupleBegin = tupleMid + 1;
else {
- if(mode == FindTupleMode.FTM_EXCLUSIVE) {
- if(matchPolicy == FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY) tupleBegin = tupleMid + 1;
- else tupleEnd = tupleMid - 1;
- }
- else {
- return encodeSlotFields(prefixMatch, tupleMid);
- }
+ if (mode == FindTupleMode.FTM_EXCLUSIVE) {
+ if (matchPolicy == FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY)
+ tupleBegin = tupleMid + 1;
+ else
+ tupleEnd = tupleMid - 1;
+ } else {
+ return encodeSlotFields(prefixMatch, tupleMid);
+ }
}
}
-
- //System.out.println("RECS: " + recBegin + " " + recMid + " " + recEnd);
-
- if(mode == FindTupleMode.FTM_EXACT) return encodeSlotFields(prefixMatch, GREATEST_SLOT);
-
- // do final comparison to determine whether the search key is greater than all keys or in between some existing keys
- if(matchPolicy == FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY) {
- if(tupleBegin > frame.getTupleCount() - 1) return encodeSlotFields(prefixMatch, GREATEST_SLOT);
- frameTuple.resetByTupleIndex(frame, tupleBegin);
- if(multiCmp.compare(searchKey, frameTuple) < 0) return encodeSlotFields(prefixMatch, tupleBegin);
- else return encodeSlotFields(prefixMatch, GREATEST_SLOT);
+
+ // System.out.println("RECS: " + recBegin + " " + recMid + " " +
+ // recEnd);
+
+ if (mode == FindTupleMode.FTM_EXACT)
+ return encodeSlotFields(prefixMatch, GREATEST_SLOT);
+
+ // do final comparison to determine whether the search key is greater
+ // than all keys or in between some existing keys
+ if (matchPolicy == FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY) {
+ if (tupleBegin > frame.getTupleCount() - 1)
+ return encodeSlotFields(prefixMatch, GREATEST_SLOT);
+ frameTuple.resetByTupleIndex(frame, tupleBegin);
+ if (multiCmp.compare(searchKey, frameTuple) < 0)
+ return encodeSlotFields(prefixMatch, tupleBegin);
+ else
+ return encodeSlotFields(prefixMatch, GREATEST_SLOT);
+ } else {
+ if (tupleEnd < 0)
+ return encodeSlotFields(prefixMatch, GREATEST_SLOT);
+ frameTuple.resetByTupleIndex(frame, tupleEnd);
+ if (multiCmp.compare(searchKey, frameTuple) > 0)
+ return encodeSlotFields(prefixMatch, tupleEnd);
+ else
+ return encodeSlotFields(prefixMatch, GREATEST_SLOT);
}
- else {
- if(tupleEnd < 0) return encodeSlotFields(prefixMatch, GREATEST_SLOT);
- frameTuple.resetByTupleIndex(frame, tupleEnd);
- if(multiCmp.compare(searchKey, frameTuple) > 0) return encodeSlotFields(prefixMatch, tupleEnd);
- else return encodeSlotFields(prefixMatch, GREATEST_SLOT);
- }
- }
-
- public int getPrefixSlotStartOff() {
- return buf.capacity() - slotSize;
- }
-
- public int getPrefixSlotEndOff() {
- return buf.capacity() - slotSize * frame.getPrefixTupleCount();
- }
-
- public int getTupleSlotStartOff() {
- return getPrefixSlotEndOff() - slotSize;
- }
-
- public int getTupleSlotEndOff() {
- return buf.capacity() - slotSize * (frame.getPrefixTupleCount() + frame.getTupleCount());
- }
-
- public int getSlotSize() {
- return slotSize;
- }
-
- public void setSlot(int offset, int value) {
- frame.getBuffer().putInt(offset, value);
- }
-
- public int insertSlot(int slot, int tupleOff) {
- int slotNum = decodeSecondSlotField(slot);
- if(slotNum == GREATEST_SLOT) {
- int slotOff = getTupleSlotEndOff() - slotSize;
- int newSlot = encodeSlotFields(decodeFirstSlotField(slot), tupleOff);
- setSlot(slotOff, newSlot);
- return newSlot;
- }
- else {
- int slotEndOff = getTupleSlotEndOff();
- int slotOff = getTupleSlotOff(slotNum);
- int length = (slotOff - slotEndOff) + slotSize;
- System.arraycopy(frame.getBuffer().array(), slotEndOff, frame.getBuffer().array(), slotEndOff - slotSize, length);
-
- int newSlot = encodeSlotFields(decodeFirstSlotField(slot), tupleOff);
- setSlot(slotOff, newSlot);
- return newSlot;
- }
- }
-
- public void setFrame(FieldPrefixNSMLeafFrame frame) {
- this.frame = frame;
- this.buf = frame.getBuffer();
- }
-
- public int getPrefixSlotOff(int tupleIndex) {
- return getPrefixSlotStartOff() - tupleIndex * slotSize;
- }
-
- public int getTupleSlotOff(int tupleIndex) {
- return getTupleSlotStartOff() - tupleIndex * slotSize;
- }
-
- public void setPrefixSlot(int tupleIndex, int slot) {
- buf.putInt(getPrefixSlotOff(tupleIndex), slot);
- }
+ }
+
+ public int getPrefixSlotStartOff() {
+ return buf.capacity() - slotSize;
+ }
+
+ public int getPrefixSlotEndOff() {
+ return buf.capacity() - slotSize * frame.getPrefixTupleCount();
+ }
+
+ public int getTupleSlotStartOff() {
+ return getPrefixSlotEndOff() - slotSize;
+ }
+
+ public int getTupleSlotEndOff() {
+ return buf.capacity() - slotSize * (frame.getPrefixTupleCount() + frame.getTupleCount());
+ }
+
+ public int getSlotSize() {
+ return slotSize;
+ }
+
+ public void setSlot(int offset, int value) {
+ frame.getBuffer().putInt(offset, value);
+ }
+
+ public int insertSlot(int slot, int tupleOff) {
+ int slotNum = decodeSecondSlotField(slot);
+ if (slotNum == GREATEST_SLOT) {
+ int slotOff = getTupleSlotEndOff() - slotSize;
+ int newSlot = encodeSlotFields(decodeFirstSlotField(slot), tupleOff);
+ setSlot(slotOff, newSlot);
+ return newSlot;
+ } else {
+ int slotEndOff = getTupleSlotEndOff();
+ int slotOff = getTupleSlotOff(slotNum);
+ int length = (slotOff - slotEndOff) + slotSize;
+ System.arraycopy(frame.getBuffer().array(), slotEndOff, frame.getBuffer().array(), slotEndOff - slotSize,
+ length);
+
+ int newSlot = encodeSlotFields(decodeFirstSlotField(slot), tupleOff);
+ setSlot(slotOff, newSlot);
+ return newSlot;
+ }
+ }
+
+ public void setFrame(FieldPrefixNSMLeafFrame frame) {
+ this.frame = frame;
+ this.buf = frame.getBuffer();
+ }
+
+ public int getPrefixSlotOff(int tupleIndex) {
+ return getPrefixSlotStartOff() - tupleIndex * slotSize;
+ }
+
+ public int getTupleSlotOff(int tupleIndex) {
+ return getTupleSlotStartOff() - tupleIndex * slotSize;
+ }
+
+ public void setPrefixSlot(int tupleIndex, int slot) {
+ buf.putInt(getPrefixSlotOff(tupleIndex), slot);
+ }
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixTupleReference.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixTupleReference.java
index d818e0a..c131e3f 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixTupleReference.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixTupleReference.java
@@ -7,90 +7,87 @@
import edu.uci.ics.hyracks.storage.am.btree.frames.FieldPrefixNSMLeafFrame;
public class FieldPrefixTupleReference implements IBTreeTupleReference {
-
- private FieldPrefixNSMLeafFrame frame;
- private int prefixTupleStartOff;
- private int suffixTupleStartOff;
- private int numPrefixFields;
- private int fieldCount;
- private IBTreeTupleReference helperTuple;
-
- public FieldPrefixTupleReference(IBTreeTupleReference helperTuple) {
- this.helperTuple = helperTuple;
- }
-
- @Override
- public void resetByTupleIndex(IBTreeFrame frame, int tupleIndex) {
- this.frame = (FieldPrefixNSMLeafFrame)frame;
-
- int tupleSlotOff = this.frame.slotManager.getTupleSlotOff(tupleIndex);
- int tupleSlot = this.frame.getBuffer().getInt(tupleSlotOff);
+
+ private FieldPrefixNSMLeafFrame frame;
+ private int prefixTupleStartOff;
+ private int suffixTupleStartOff;
+ private int numPrefixFields;
+ private int fieldCount;
+ private IBTreeTupleReference helperTuple;
+
+ public FieldPrefixTupleReference(IBTreeTupleReference helperTuple) {
+ this.helperTuple = helperTuple;
+ }
+
+ @Override
+ public void resetByTupleIndex(IBTreeFrame frame, int tupleIndex) {
+ this.frame = (FieldPrefixNSMLeafFrame) frame;
+
+ int tupleSlotOff = this.frame.slotManager.getTupleSlotOff(tupleIndex);
+ int tupleSlot = this.frame.getBuffer().getInt(tupleSlotOff);
int prefixSlotNum = this.frame.slotManager.decodeFirstSlotField(tupleSlot);
suffixTupleStartOff = this.frame.slotManager.decodeSecondSlotField(tupleSlot);
-
- if(prefixSlotNum != FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
+
+ if (prefixSlotNum != FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
int prefixSlotOff = this.frame.slotManager.getPrefixSlotOff(prefixSlotNum);
int prefixSlot = this.frame.getBuffer().getInt(prefixSlotOff);
numPrefixFields = this.frame.slotManager.decodeFirstSlotField(prefixSlot);
- prefixTupleStartOff = this.frame.slotManager.decodeSecondSlotField(prefixSlot);
+ prefixTupleStartOff = this.frame.slotManager.decodeSecondSlotField(prefixSlot);
+ } else {
+ numPrefixFields = 0;
+ prefixTupleStartOff = -1;
}
- else {
- numPrefixFields = 0;
- prefixTupleStartOff = -1;
+ }
+
+ @Override
+ public void setFieldCount(int fieldCount) {
+ this.fieldCount = fieldCount;
+ }
+
+ @Override
+ public void setFieldCount(int fieldStartIndex, int fieldCount) {
+ // not implemented
+ }
+
+ @Override
+ public int getFieldCount() {
+ return fieldCount;
+ }
+
+ @Override
+ public byte[] getFieldData(int fIdx) {
+ return frame.getBuffer().array();
+ }
+
+ @Override
+ public int getFieldLength(int fIdx) {
+ if (fIdx < numPrefixFields) {
+ helperTuple.setFieldCount(numPrefixFields);
+ helperTuple.resetByOffset(frame.getBuffer(), prefixTupleStartOff);
+ return helperTuple.getFieldLength(fIdx);
+ } else {
+ helperTuple.setFieldCount(numPrefixFields, fieldCount - numPrefixFields);
+ helperTuple.resetByOffset(frame.getBuffer(), suffixTupleStartOff);
+ return helperTuple.getFieldLength(fIdx - numPrefixFields);
}
- }
-
- @Override
- public void setFieldCount(int fieldCount) {
- this.fieldCount = fieldCount;
- }
+ }
- @Override
- public void setFieldCount(int fieldStartIndex, int fieldCount) {
- // not implemented
- }
-
- @Override
- public int getFieldCount() {
- return fieldCount;
- }
+ @Override
+ public int getFieldStart(int fIdx) {
+ if (fIdx < numPrefixFields) {
+ helperTuple.setFieldCount(numPrefixFields);
+ helperTuple.resetByOffset(frame.getBuffer(), prefixTupleStartOff);
+ return helperTuple.getFieldStart(fIdx);
+ } else {
+ helperTuple.setFieldCount(numPrefixFields, fieldCount - numPrefixFields);
+ helperTuple.resetByOffset(frame.getBuffer(), suffixTupleStartOff);
+ return helperTuple.getFieldStart(fIdx - numPrefixFields);
+ }
+ }
- @Override
- public byte[] getFieldData(int fIdx) {
- return frame.getBuffer().array();
- }
-
- @Override
- public int getFieldLength(int fIdx) {
- if(fIdx < numPrefixFields) {
- helperTuple.setFieldCount(numPrefixFields);
- helperTuple.resetByOffset(frame.getBuffer(), prefixTupleStartOff);
- return helperTuple.getFieldLength(fIdx);
- }
- else {
- helperTuple.setFieldCount(numPrefixFields, fieldCount - numPrefixFields);
- helperTuple.resetByOffset(frame.getBuffer(), suffixTupleStartOff);
- return helperTuple.getFieldLength(fIdx - numPrefixFields);
- }
- }
-
- @Override
- public int getFieldStart(int fIdx) {
- if(fIdx < numPrefixFields) {
- helperTuple.setFieldCount(numPrefixFields);
- helperTuple.resetByOffset(frame.getBuffer(), prefixTupleStartOff);
- return helperTuple.getFieldStart(fIdx);
- }
- else {
- helperTuple.setFieldCount(numPrefixFields, fieldCount - numPrefixFields);
- helperTuple.resetByOffset(frame.getBuffer(), suffixTupleStartOff);
- return helperTuple.getFieldStart(fIdx - numPrefixFields);
- }
- }
-
- // unsupported operation
- @Override
- public void resetByOffset(ByteBuffer buf, int tupleStartOffset) {
- frame = null;
- }
+ // unsupported operation
+ @Override
+ public void resetByOffset(ByteBuffer buf, int tupleStartOffset) {
+ frame = null;
+ }
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FindTupleMode.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FindTupleMode.java
index 79dc343..9e2fa75 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FindTupleMode.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FindTupleMode.java
@@ -1,7 +1,20 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.btree.impls;
public enum FindTupleMode {
- FTM_INCLUSIVE,
- FTM_EXCLUSIVE,
- FTM_EXACT
+ FTM_INCLUSIVE, FTM_EXCLUSIVE, FTM_EXACT
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FindTupleNoExactMatchPolicy.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FindTupleNoExactMatchPolicy.java
index 45f2765..1bc6ea3 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FindTupleNoExactMatchPolicy.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FindTupleNoExactMatchPolicy.java
@@ -1,6 +1,20 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.btree.impls;
public enum FindTupleNoExactMatchPolicy {
- FTP_LOWER_KEY,
- FTP_HIGHER_KEY
+ FTP_LOWER_KEY, FTP_HIGHER_KEY
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/IntArrayList.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/IntArrayList.java
index 124cccd..2d0b9df 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/IntArrayList.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/IntArrayList.java
@@ -1,48 +1,64 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.btree.impls;
public class IntArrayList {
- private int[] data;
- private int size;
- private final int growth;
-
- public IntArrayList(int initialCapacity, int growth) {
- data = new int[initialCapacity];
- size = 0;
- this.growth = growth;
- }
-
- public int size() {
- return size;
- }
-
- public void add(int i) {
- if(size == data.length) {
- int[] newData = new int[data.length + growth];
- System.arraycopy(data, 0, newData, 0, data.length);
- data = newData;
- }
-
- data[size++] = i;
- }
-
- public void removeLast() {
- if(size > 0) size--;
- }
-
- // WARNING: caller is responsible for checking size > 0
- public int getLast() {
- return data[size-1];
- }
-
- public int get(int i) {
- return data[i];
- }
-
- public void clear() {
- size = 0;
- }
-
- public boolean isEmpty() {
- return size == 0;
- }
+ private int[] data;
+ private int size;
+ private final int growth;
+
+ public IntArrayList(int initialCapacity, int growth) {
+ data = new int[initialCapacity];
+ size = 0;
+ this.growth = growth;
+ }
+
+ public int size() {
+ return size;
+ }
+
+ public void add(int i) {
+ if (size == data.length) {
+ int[] newData = new int[data.length + growth];
+ System.arraycopy(data, 0, newData, 0, data.length);
+ data = newData;
+ }
+
+ data[size++] = i;
+ }
+
+ public void removeLast() {
+ if (size > 0)
+ size--;
+ }
+
+ // WARNING: caller is responsible for checking size > 0
+ public int getLast() {
+ return data[size - 1];
+ }
+
+ public int get(int i) {
+ return data[i];
+ }
+
+ public void clear() {
+ size = 0;
+ }
+
+ public boolean isEmpty() {
+ return size == 0;
+ }
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/MultiComparator.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/MultiComparator.java
index 486d732..e029af0 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/MultiComparator.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/MultiComparator.java
@@ -27,78 +27,74 @@
@SuppressWarnings("unchecked")
public class MultiComparator {
-
- private static final long serialVersionUID = 1L;
-
- private IBinaryComparator[] cmps = null;
- private ITypeTrait[] typeTraits;
-
- public MultiComparator(ITypeTrait[] typeTraits, IBinaryComparator[] cmps) {
- this.typeTraits = typeTraits;
- this.cmps = cmps;
- }
-
- public int compare(ITupleReference tupleA, ITupleReference tupleB) {
- for(int i = 0; i < cmps.length; i++) {
- int cmp = cmps[i].compare(tupleA.getFieldData(i),
- tupleA.getFieldStart(i),
- tupleA.getFieldLength(i),
- tupleB.getFieldData(i),
- tupleB.getFieldStart(i),
- tupleB.getFieldLength(i));
- if(cmp < 0) return -1;
- else if(cmp > 0) return 1;
- }
- return 0;
- }
-
- public int fieldRangeCompare(ITupleReference tupleA, ITupleReference tupleB, int startFieldIndex, int numFields) {
- for(int i = startFieldIndex; i < startFieldIndex + numFields; i++) {
- int cmp = cmps[i].compare(
- tupleA.getFieldData(i),
- tupleA.getFieldStart(i),
- tupleA.getFieldLength(i),
- tupleB.getFieldData(i),
- tupleB.getFieldStart(i),
- tupleB.getFieldLength(i));
- if(cmp < 0) return -1;
- else if(cmp > 0) return 1;
- }
- return 0;
- }
-
- public String printTuple(ITupleReference tuple, ISerializerDeserializer[] fields) throws HyracksDataException {
- StringBuilder strBuilder = new StringBuilder();
- for(int i = 0; i < tuple.getFieldCount(); i++) {
- ByteArrayInputStream inStream = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
- DataInput dataIn = new DataInputStream(inStream);
- Object o = fields[i].deserialize(dataIn);
- strBuilder.append(o.toString() + " ");
- }
- return strBuilder.toString();
- }
-
- public IBinaryComparator[] getComparators() {
- return cmps;
- }
-
- public int getKeyFieldCount() {
- return cmps.length;
- }
-
- public void setComparators(IBinaryComparator[] cmps) {
- this.cmps = cmps;
- }
-
- public int size() {
- return cmps.length;
- }
-
- public int getFieldCount() {
- return typeTraits.length;
- }
-
- public ITypeTrait[] getTypeTraits() {
- return typeTraits;
- }
+
+ private static final long serialVersionUID = 1L;
+
+ private IBinaryComparator[] cmps = null;
+ private ITypeTrait[] typeTraits;
+
+ public MultiComparator(ITypeTrait[] typeTraits, IBinaryComparator[] cmps) {
+ this.typeTraits = typeTraits;
+ this.cmps = cmps;
+ }
+
+ public int compare(ITupleReference tupleA, ITupleReference tupleB) {
+ for (int i = 0; i < cmps.length; i++) {
+ int cmp = cmps[i].compare(tupleA.getFieldData(i), tupleA.getFieldStart(i), tupleA.getFieldLength(i), tupleB
+ .getFieldData(i), tupleB.getFieldStart(i), tupleB.getFieldLength(i));
+ if (cmp < 0)
+ return -1;
+ else if (cmp > 0)
+ return 1;
+ }
+ return 0;
+ }
+
+ public int fieldRangeCompare(ITupleReference tupleA, ITupleReference tupleB, int startFieldIndex, int numFields) {
+ for (int i = startFieldIndex; i < startFieldIndex + numFields; i++) {
+ int cmp = cmps[i].compare(tupleA.getFieldData(i), tupleA.getFieldStart(i), tupleA.getFieldLength(i), tupleB
+ .getFieldData(i), tupleB.getFieldStart(i), tupleB.getFieldLength(i));
+ if (cmp < 0)
+ return -1;
+ else if (cmp > 0)
+ return 1;
+ }
+ return 0;
+ }
+
+ public String printTuple(ITupleReference tuple, ISerializerDeserializer[] fields) throws HyracksDataException {
+ StringBuilder strBuilder = new StringBuilder();
+ for (int i = 0; i < tuple.getFieldCount(); i++) {
+ ByteArrayInputStream inStream = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
+ tuple.getFieldLength(i));
+ DataInput dataIn = new DataInputStream(inStream);
+ Object o = fields[i].deserialize(dataIn);
+ strBuilder.append(o.toString() + " ");
+ }
+ return strBuilder.toString();
+ }
+
+ public IBinaryComparator[] getComparators() {
+ return cmps;
+ }
+
+ public int getKeyFieldCount() {
+ return cmps.length;
+ }
+
+ public void setComparators(IBinaryComparator[] cmps) {
+ this.cmps = cmps;
+ }
+
+ public int size() {
+ return cmps.length;
+ }
+
+ public int getFieldCount() {
+ return typeTraits.length;
+ }
+
+ public ITypeTrait[] getTypeTraits() {
+ return typeTraits;
+ }
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/NodeFrontier.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/NodeFrontier.java
index 138c1fa..83baa92 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/NodeFrontier.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/NodeFrontier.java
@@ -19,12 +19,11 @@
import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
public class NodeFrontier {
- public ICachedPage page;
- public int pageId;
- public IBTreeTupleReference lastTuple;
-
- public NodeFrontier(IBTreeTupleReference lastTuple) {
- this.lastTuple = lastTuple;
- }
-}
+ public ICachedPage page;
+ public int pageId;
+ public IBTreeTupleReference lastTuple;
+ public NodeFrontier(IBTreeTupleReference lastTuple) {
+ this.lastTuple = lastTuple;
+ }
+}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/OrderedSlotManager.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/OrderedSlotManager.java
index 92c91d3..258161b 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/OrderedSlotManager.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/OrderedSlotManager.java
@@ -21,105 +21,112 @@
import edu.uci.ics.hyracks.storage.am.btree.api.ISlotManager;
public class OrderedSlotManager implements ISlotManager {
-
- private static final int slotSize = 4;
- private IBTreeFrame frame;
-
- @Override
- public int findTupleIndex(ITupleReference searchKey, IBTreeTupleReference frameTuple, MultiComparator multiCmp, FindTupleMode mode, FindTupleNoExactMatchPolicy matchPolicy) {
- if(frame.getTupleCount() <= 0) return -1;
-
- int mid;
- int begin = 0;
- int end = frame.getTupleCount() - 1;
-
- while(begin <= end) {
- mid = (begin + end) / 2;
- frameTuple.resetByTupleIndex(frame, mid);
-
- int cmp = multiCmp.compare(searchKey, frameTuple);
- if(cmp < 0) {
- end = mid - 1;
- }
- else if(cmp > 0) {
- begin = mid + 1;
- }
- else {
- if(mode == FindTupleMode.FTM_EXCLUSIVE) {
- if(matchPolicy == FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY) begin = mid + 1;
- else end = mid - 1;
- }
- else {
- return mid;
- }
- }
- }
-
- if(mode == FindTupleMode.FTM_EXACT) return -1;
-
- if(matchPolicy == FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY) {
- if(begin > frame.getTupleCount() - 1) return -1;
- frameTuple.resetByTupleIndex(frame, begin);
- if(multiCmp.compare(searchKey, frameTuple) < 0) return begin;
- else return -1;
- }
- else {
- if(end < 0) return -1;
- frameTuple.resetByTupleIndex(frame, end);
- if(multiCmp.compare(searchKey, frameTuple) > 0) return end;
- else return -1;
- }
- }
-
- @Override
- public int getTupleOff(int offset) {
- return frame.getBuffer().getInt(offset);
- }
-
- @Override
- public void setSlot(int offset, int value) {
- frame.getBuffer().putInt(offset, value);
- }
-
- @Override
- public int getSlotEndOff() {
- return frame.getBuffer().capacity() - (frame.getTupleCount() * slotSize);
- }
-
- @Override
- public int getSlotStartOff() {
- return frame.getBuffer().capacity() - slotSize;
- }
- @Override
- public int getSlotSize() {
- return slotSize;
- }
-
- @Override
- public int insertSlot(int tupleIndex, int tupleOff) {
- int slotOff = getSlotOff(tupleIndex);
- if(tupleIndex < 0) {
- slotOff = getSlotEndOff() - slotSize;
- setSlot(slotOff, tupleOff);
- return slotOff;
- }
- else {
- int slotEndOff = getSlotEndOff();
- int length = (slotOff - slotEndOff) + slotSize;
- System.arraycopy(frame.getBuffer().array(), slotEndOff, frame.getBuffer().array(), slotEndOff - slotSize, length);
- setSlot(slotOff, tupleOff);
- return slotOff;
- }
- }
-
- @Override
- public void setFrame(IBTreeFrame frame) {
- this.frame = frame;
- }
-
- @Override
- public int getSlotOff(int tupleIndex) {
- return getSlotStartOff() - tupleIndex * slotSize;
- }
+ private static final int slotSize = 4;
+ private IBTreeFrame frame;
+
+ @Override
+ public int findTupleIndex(ITupleReference searchKey, IBTreeTupleReference frameTuple, MultiComparator multiCmp,
+ FindTupleMode mode, FindTupleNoExactMatchPolicy matchPolicy) {
+ if (frame.getTupleCount() <= 0)
+ return -1;
+
+ int mid;
+ int begin = 0;
+ int end = frame.getTupleCount() - 1;
+
+ while (begin <= end) {
+ mid = (begin + end) / 2;
+ frameTuple.resetByTupleIndex(frame, mid);
+
+ int cmp = multiCmp.compare(searchKey, frameTuple);
+ if (cmp < 0) {
+ end = mid - 1;
+ } else if (cmp > 0) {
+ begin = mid + 1;
+ } else {
+ if (mode == FindTupleMode.FTM_EXCLUSIVE) {
+ if (matchPolicy == FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY)
+ begin = mid + 1;
+ else
+ end = mid - 1;
+ } else {
+ return mid;
+ }
+ }
+ }
+
+ if (mode == FindTupleMode.FTM_EXACT)
+ return -1;
+
+ if (matchPolicy == FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY) {
+ if (begin > frame.getTupleCount() - 1)
+ return -1;
+ frameTuple.resetByTupleIndex(frame, begin);
+ if (multiCmp.compare(searchKey, frameTuple) < 0)
+ return begin;
+ else
+ return -1;
+ } else {
+ if (end < 0)
+ return -1;
+ frameTuple.resetByTupleIndex(frame, end);
+ if (multiCmp.compare(searchKey, frameTuple) > 0)
+ return end;
+ else
+ return -1;
+ }
+ }
+
+ @Override
+ public int getTupleOff(int offset) {
+ return frame.getBuffer().getInt(offset);
+ }
+
+ @Override
+ public void setSlot(int offset, int value) {
+ frame.getBuffer().putInt(offset, value);
+ }
+
+ @Override
+ public int getSlotEndOff() {
+ return frame.getBuffer().capacity() - (frame.getTupleCount() * slotSize);
+ }
+
+ @Override
+ public int getSlotStartOff() {
+ return frame.getBuffer().capacity() - slotSize;
+ }
+
+ @Override
+ public int getSlotSize() {
+ return slotSize;
+ }
+
+ @Override
+ public int insertSlot(int tupleIndex, int tupleOff) {
+ int slotOff = getSlotOff(tupleIndex);
+ if (tupleIndex < 0) {
+ slotOff = getSlotEndOff() - slotSize;
+ setSlot(slotOff, tupleOff);
+ return slotOff;
+ } else {
+ int slotEndOff = getSlotEndOff();
+ int length = (slotOff - slotEndOff) + slotSize;
+ System.arraycopy(frame.getBuffer().array(), slotEndOff, frame.getBuffer().array(), slotEndOff - slotSize,
+ length);
+ setSlot(slotOff, tupleOff);
+ return slotOff;
+ }
+ }
+
+ @Override
+ public void setFrame(IBTreeFrame frame) {
+ this.frame = frame;
+ }
+
+ @Override
+ public int getSlotOff(int tupleIndex) {
+ return getSlotStartOff() - tupleIndex * slotSize;
+ }
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/RangePredicate.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/RangePredicate.java
index 5b6dea8..697b9ab 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/RangePredicate.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/RangePredicate.java
@@ -19,76 +19,74 @@
import edu.uci.ics.hyracks.storage.am.btree.api.ISearchPredicate;
public class RangePredicate implements ISearchPredicate {
-
- private static final long serialVersionUID = 1L;
-
- protected boolean isForward = true;
- protected ITupleReference lowKey = null;
- protected ITupleReference highKey = null;
- protected boolean lowKeyInclusive = true;
- protected boolean highKeyInclusive = true;
- protected MultiComparator lowKeyCmp;
- protected MultiComparator highKeyCmp;
-
- public RangePredicate() {
- }
-
- public RangePredicate(boolean isForward, ITupleReference lowKey, ITupleReference highKey,
- boolean lowKeyInclusive, boolean highKeyInclusive, MultiComparator lowKeyCmp, MultiComparator highKeyCmp) {
- this.isForward = isForward;
- this.lowKey = lowKey;
- this.highKey = highKey;
- this.lowKeyInclusive = lowKeyInclusive;
- this.highKeyInclusive = highKeyInclusive;
- this.lowKeyCmp = lowKeyCmp;
- this.highKeyCmp = highKeyCmp;
- }
-
- public MultiComparator getLowKeyComparator() {
- return lowKeyCmp;
- }
-
- public MultiComparator getHighKeyComparator() {
- return highKeyCmp;
- }
-
- public void setLowKeyComparator(MultiComparator lowKeyCmp) {
- this.lowKeyCmp = lowKeyCmp;
- }
-
- public void setHighKeyComparator(MultiComparator highKeyCmp) {
- this.highKeyCmp = highKeyCmp;
- }
-
- public boolean isForward() {
- return isForward;
- }
-
- public ITupleReference getLowKey() {
- return lowKey;
- }
-
- public ITupleReference getHighKey() {
- return highKey;
- }
-
- public void setLowKey(ITupleReference lowKey, boolean lowKeyInclusive) {
- this.lowKey = lowKey;
- this.lowKeyInclusive = lowKeyInclusive;
- }
-
- public void setHighKey(ITupleReference highKey, boolean highKeyInclusive) {
- this.highKey = highKey;
- this.highKeyInclusive = highKeyInclusive;
- }
-
- public boolean isLowKeyInclusive() {
- return lowKeyInclusive;
- }
-
- public boolean isHighKeyInclusive() {
- return highKeyInclusive;
- }
+
+ private static final long serialVersionUID = 1L;
+
+ protected boolean isForward = true;
+ protected ITupleReference lowKey = null;
+ protected ITupleReference highKey = null;
+ protected boolean lowKeyInclusive = true;
+ protected boolean highKeyInclusive = true;
+ protected MultiComparator lowKeyCmp;
+ protected MultiComparator highKeyCmp;
+
+ public RangePredicate() {
+ }
+
+ public RangePredicate(boolean isForward, ITupleReference lowKey, ITupleReference highKey, boolean lowKeyInclusive,
+ boolean highKeyInclusive, MultiComparator lowKeyCmp, MultiComparator highKeyCmp) {
+ this.isForward = isForward;
+ this.lowKey = lowKey;
+ this.highKey = highKey;
+ this.lowKeyInclusive = lowKeyInclusive;
+ this.highKeyInclusive = highKeyInclusive;
+ this.lowKeyCmp = lowKeyCmp;
+ this.highKeyCmp = highKeyCmp;
+ }
+
+ public MultiComparator getLowKeyComparator() {
+ return lowKeyCmp;
+ }
+
+ public MultiComparator getHighKeyComparator() {
+ return highKeyCmp;
+ }
+
+ public void setLowKeyComparator(MultiComparator lowKeyCmp) {
+ this.lowKeyCmp = lowKeyCmp;
+ }
+
+ public void setHighKeyComparator(MultiComparator highKeyCmp) {
+ this.highKeyCmp = highKeyCmp;
+ }
+
+ public boolean isForward() {
+ return isForward;
+ }
+
+ public ITupleReference getLowKey() {
+ return lowKey;
+ }
+
+ public ITupleReference getHighKey() {
+ return highKey;
+ }
+
+ public void setLowKey(ITupleReference lowKey, boolean lowKeyInclusive) {
+ this.lowKey = lowKey;
+ this.lowKeyInclusive = lowKeyInclusive;
+ }
+
+ public void setHighKey(ITupleReference highKey, boolean highKeyInclusive) {
+ this.highKey = highKey;
+ this.highKeyInclusive = highKeyInclusive;
+ }
+
+ public boolean isLowKeyInclusive() {
+ return lowKeyInclusive;
+ }
+
+ public boolean isHighKeyInclusive() {
+ return highKeyInclusive;
+ }
}
-
-
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/RangeSearchCursor.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/RangeSearchCursor.java
index 1299f04..3d3704c 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/RangeSearchCursor.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/RangeSearchCursor.java
@@ -23,212 +23,212 @@
import edu.uci.ics.hyracks.storage.am.btree.api.ISearchPredicate;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-import edu.uci.ics.hyracks.storage.common.file.FileInfo;
+import edu.uci.ics.hyracks.storage.common.file.FileHandle;
public class RangeSearchCursor implements IBTreeCursor {
-
- private int fileId = -1;
- private ICachedPage page = null;
- private IBTreeLeafFrame frame = null;
- private IBufferCache bufferCache = null;
-
- private int tupleIndex = 0;
- private int stopTupleIndex;
- private int tupleIndexInc = 0;
-
- private FindTupleMode lowKeyFtm;
- private FindTupleMode highKeyFtm;
-
- private FindTupleNoExactMatchPolicy lowKeyFtp;
- private FindTupleNoExactMatchPolicy highKeyFtp;
-
- private IBTreeTupleReference frameTuple;
-
- private RangePredicate pred;
- private MultiComparator lowKeyCmp;
- private MultiComparator highKeyCmp;
- private ITupleReference lowKey;
- private ITupleReference highKey;
-
- public RangeSearchCursor(IBTreeLeafFrame frame) {
- this.frame = frame;
- this.frameTuple = frame.createTupleReference();
- }
-
- @Override
- public void close() throws Exception {
- if(page != null) {
- page.releaseReadLatch();
- bufferCache.unpin(page);
- page = null;
- }
- }
-
- public ITupleReference getTuple() {
- return frameTuple;
- }
-
- @Override
- public ICachedPage getPage() {
- return page;
- }
-
- private void fetchNextLeafPage(int nextLeafPage) throws HyracksDataException {
- ICachedPage nextLeaf = bufferCache.pin(FileInfo.getDiskPageId(fileId, nextLeafPage), false);
- nextLeaf.acquireReadLatch();
- page.releaseReadLatch();
- bufferCache.unpin(page);
-
- page = nextLeaf;
- frame.setPage(page);
- }
-
- @Override
- public boolean hasNext() throws Exception {
-
- if(pred.isForward()) {
- if(tupleIndex >= frame.getTupleCount()) {
- int nextLeafPage = frame.getNextLeaf();
- if(nextLeafPage >= 0) {
- fetchNextLeafPage(nextLeafPage);
- tupleIndex = 0;
-
- stopTupleIndex = getHighKeyIndex();
- if(stopTupleIndex < 0) return false;
- }
- else {
- return false;
- }
- }
-
- frameTuple.resetByTupleIndex(frame, tupleIndex);
- if(highKey == null || tupleIndex <= stopTupleIndex) {
- return true;
- }
- else return false;
- }
- else {
- if(tupleIndex < 0) {
- int nextLeafPage = frame.getPrevLeaf();
- if(nextLeafPage >= 0) {
- fetchNextLeafPage(nextLeafPage);
- tupleIndex = frame.getTupleCount() - 1;
-
- stopTupleIndex = getLowKeyIndex();
- if(stopTupleIndex >= frame.getTupleCount()) return false;
- }
- else {
- return false;
- }
- }
-
- frameTuple.resetByTupleIndex(frame, tupleIndex);
- if(lowKey == null || tupleIndex >= stopTupleIndex) {
- return true;
- }
- else return false;
- }
- }
+ private int fileId = -1;
+ private ICachedPage page = null;
+ private IBTreeLeafFrame frame = null;
+ private IBufferCache bufferCache = null;
- @Override
- public void next() throws Exception {
- tupleIndex += tupleIndexInc;
- }
-
- private int getLowKeyIndex() {
- int index;
- if(lowKey == null) index = 0;
- else {
- index = frame.findTupleIndex(lowKey, frameTuple, lowKeyCmp, lowKeyFtm, lowKeyFtp);
- if(pred.lowKeyInclusive) {
- index++;
- }
- else {
- if(index < 0) index = frame.getTupleCount();
- }
- }
- return index;
- }
-
- private int getHighKeyIndex() {
- int index;
- if(highKey == null) index = frame.getTupleCount() - 1;
- else {
- index = frame.findTupleIndex(highKey, frameTuple, highKeyCmp, highKeyFtm, highKeyFtp);
- if(pred.highKeyInclusive) {
- if(index < 0) index = frame.getTupleCount() - 1;
- else index--;
- }
- }
- return index;
- }
+ private int tupleIndex = 0;
+ private int stopTupleIndex;
+ private int tupleIndexInc = 0;
-
- @Override
- public void open(ICachedPage page, ISearchPredicate searchPred) throws Exception {
- // in case open is called multiple times without closing
- if(this.page != null) {
- this.page.releaseReadLatch();
- bufferCache.unpin(this.page);
- }
-
- this.page = page;
- frame.setPage(page);
-
- pred = (RangePredicate)searchPred;
- lowKeyCmp = pred.getLowKeyComparator();
- highKeyCmp = pred.getHighKeyComparator();
-
- lowKey = pred.getLowKey();
- highKey = pred.getHighKey();
-
- // field count must be identical for lowKeyCmp and highKeyCmp (key count may be different)
- frameTuple.setFieldCount(lowKeyCmp.getFieldCount());
-
- // init
- lowKeyFtm = FindTupleMode.FTM_EXCLUSIVE;
- if(pred.lowKeyInclusive) {
- lowKeyFtp = FindTupleNoExactMatchPolicy.FTP_LOWER_KEY;
- }
- else {
- lowKeyFtp = FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY;
- }
-
- highKeyFtm = FindTupleMode.FTM_EXCLUSIVE;
- if(pred.highKeyInclusive) {
- highKeyFtp = FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY;
- }
- else {
- highKeyFtp = FindTupleNoExactMatchPolicy.FTP_LOWER_KEY;
- }
-
- if(pred.isForward()) {
- tupleIndex = getLowKeyIndex();
- stopTupleIndex = getHighKeyIndex();
- tupleIndexInc = 1;
- }
- else {
- tupleIndex = getHighKeyIndex();
- stopTupleIndex = getLowKeyIndex();
- tupleIndexInc = -1;
- }
- }
-
- @Override
- public void reset() {
- tupleIndex = 0;
- page = null;
- pred = null;
- }
+ private FindTupleMode lowKeyFtm;
+ private FindTupleMode highKeyFtm;
+
+ private FindTupleNoExactMatchPolicy lowKeyFtp;
+ private FindTupleNoExactMatchPolicy highKeyFtp;
+
+ private IBTreeTupleReference frameTuple;
+
+ private RangePredicate pred;
+ private MultiComparator lowKeyCmp;
+ private MultiComparator highKeyCmp;
+ private ITupleReference lowKey;
+ private ITupleReference highKey;
+
+ public RangeSearchCursor(IBTreeLeafFrame frame) {
+ this.frame = frame;
+ this.frameTuple = frame.createTupleReference();
+ }
+
+ @Override
+ public void close() throws Exception {
+ if (page != null) {
+ page.releaseReadLatch();
+ bufferCache.unpin(page);
+ page = null;
+ }
+ }
+
+ public ITupleReference getTuple() {
+ return frameTuple;
+ }
+
+ @Override
+ public ICachedPage getPage() {
+ return page;
+ }
+
+ private void fetchNextLeafPage(int nextLeafPage) throws HyracksDataException {
+ ICachedPage nextLeaf = bufferCache.pin(FileHandle.getDiskPageId(fileId, nextLeafPage), false);
+ nextLeaf.acquireReadLatch();
+
+ page.releaseReadLatch();
+ bufferCache.unpin(page);
+
+ page = nextLeaf;
+ frame.setPage(page);
+ }
+
+ @Override
+ public boolean hasNext() throws Exception {
+
+ if (pred.isForward()) {
+ if (tupleIndex >= frame.getTupleCount()) {
+ int nextLeafPage = frame.getNextLeaf();
+ if (nextLeafPage >= 0) {
+ fetchNextLeafPage(nextLeafPage);
+ tupleIndex = 0;
+
+ stopTupleIndex = getHighKeyIndex();
+ if (stopTupleIndex < 0)
+ return false;
+ } else {
+ return false;
+ }
+ }
+
+ frameTuple.resetByTupleIndex(frame, tupleIndex);
+ if (highKey == null || tupleIndex <= stopTupleIndex) {
+ return true;
+ } else
+ return false;
+ } else {
+ if (tupleIndex < 0) {
+ int nextLeafPage = frame.getPrevLeaf();
+ if (nextLeafPage >= 0) {
+ fetchNextLeafPage(nextLeafPage);
+ tupleIndex = frame.getTupleCount() - 1;
+
+ stopTupleIndex = getLowKeyIndex();
+ if (stopTupleIndex >= frame.getTupleCount())
+ return false;
+ } else {
+ return false;
+ }
+ }
+
+ frameTuple.resetByTupleIndex(frame, tupleIndex);
+ if (lowKey == null || tupleIndex >= stopTupleIndex) {
+ return true;
+ } else
+ return false;
+ }
+ }
+
+ @Override
+ public void next() throws Exception {
+ tupleIndex += tupleIndexInc;
+ }
+
+ private int getLowKeyIndex() {
+ int index;
+ if (lowKey == null)
+ index = 0;
+ else {
+ index = frame.findTupleIndex(lowKey, frameTuple, lowKeyCmp, lowKeyFtm, lowKeyFtp);
+ if (pred.lowKeyInclusive) {
+ index++;
+ } else {
+ if (index < 0)
+ index = frame.getTupleCount();
+ }
+ }
+ return index;
+ }
+
+ private int getHighKeyIndex() {
+ int index;
+ if (highKey == null)
+ index = frame.getTupleCount() - 1;
+ else {
+ index = frame.findTupleIndex(highKey, frameTuple, highKeyCmp, highKeyFtm, highKeyFtp);
+ if (pred.highKeyInclusive) {
+ if (index < 0)
+ index = frame.getTupleCount() - 1;
+ else
+ index--;
+ }
+ }
+ return index;
+ }
+
+ @Override
+ public void open(ICachedPage page, ISearchPredicate searchPred) throws Exception {
+ // in case open is called multiple times without closing
+ if (this.page != null) {
+ this.page.releaseReadLatch();
+ bufferCache.unpin(this.page);
+ }
+
+ this.page = page;
+ frame.setPage(page);
+
+ pred = (RangePredicate) searchPred;
+ lowKeyCmp = pred.getLowKeyComparator();
+ highKeyCmp = pred.getHighKeyComparator();
+
+ lowKey = pred.getLowKey();
+ highKey = pred.getHighKey();
+
+ // field count must be identical for lowKeyCmp and highKeyCmp (key count
+ // may be different)
+ frameTuple.setFieldCount(lowKeyCmp.getFieldCount());
+
+ // init
+ lowKeyFtm = FindTupleMode.FTM_EXCLUSIVE;
+ if (pred.lowKeyInclusive) {
+ lowKeyFtp = FindTupleNoExactMatchPolicy.FTP_LOWER_KEY;
+ } else {
+ lowKeyFtp = FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY;
+ }
+
+ highKeyFtm = FindTupleMode.FTM_EXCLUSIVE;
+ if (pred.highKeyInclusive) {
+ highKeyFtp = FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY;
+ } else {
+ highKeyFtp = FindTupleNoExactMatchPolicy.FTP_LOWER_KEY;
+ }
+
+ if (pred.isForward()) {
+ tupleIndex = getLowKeyIndex();
+ stopTupleIndex = getHighKeyIndex();
+ tupleIndexInc = 1;
+ } else {
+ tupleIndex = getHighKeyIndex();
+ stopTupleIndex = getLowKeyIndex();
+ tupleIndexInc = -1;
+ }
+ }
+
+ @Override
+ public void reset() {
+ tupleIndex = 0;
+ page = null;
+ pred = null;
+ }
@Override
public void setBufferCache(IBufferCache bufferCache) {
- this.bufferCache = bufferCache;
+ this.bufferCache = bufferCache;
}
@Override
public void setFileId(int fileId) {
this.fileId = fileId;
- }
+ }
}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/SlotOffTupleOff.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/SlotOffTupleOff.java
index 24001ff..c074642 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/SlotOffTupleOff.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/SlotOffTupleOff.java
@@ -16,19 +16,18 @@
package edu.uci.ics.hyracks.storage.am.btree.impls;
public class SlotOffTupleOff implements Comparable<SlotOffTupleOff> {
- public int tupleIndex;
- public int slotOff;
- public int tupleOff;
-
- public SlotOffTupleOff(int tupleIndex, int slotOff, int recOff) {
- this.tupleIndex = tupleIndex;
- this.slotOff = slotOff;
- this.tupleOff = recOff;
- }
-
- @Override
- public int compareTo(SlotOffTupleOff o) {
- return tupleOff - o.tupleOff;
- }
-}
+ public int tupleIndex;
+ public int slotOff;
+ public int tupleOff;
+ public SlotOffTupleOff(int tupleIndex, int slotOff, int recOff) {
+ this.tupleIndex = tupleIndex;
+ this.slotOff = slotOff;
+ this.tupleOff = recOff;
+ }
+
+ @Override
+ public int compareTo(SlotOffTupleOff o) {
+ return tupleOff - o.tupleOff;
+ }
+}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/SpaceStatus.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/SpaceStatus.java
index 6cb9999..9ad15ee 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/SpaceStatus.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/SpaceStatus.java
@@ -16,7 +16,5 @@
package edu.uci.ics.hyracks.storage.am.btree.impls;
public enum SpaceStatus {
- INSUFFICIENT_SPACE,
- SUFFICIENT_CONTIGUOUS_SPACE,
- SUFFICIENT_SPACE
+ INSUFFICIENT_SPACE, SUFFICIENT_CONTIGUOUS_SPACE, SUFFICIENT_SPACE
}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/SplitKey.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/SplitKey.java
index 67dfc92..cf2fd74 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/SplitKey.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/SplitKey.java
@@ -19,73 +19,72 @@
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleReference;
-public class SplitKey {
- public byte[] data = null;
- public ByteBuffer buf = null;
- public IBTreeTupleReference tuple;
- public int keySize = 0;
-
- public SplitKey(IBTreeTupleReference tuple) {
- this.tuple = tuple;
- }
-
- public void initData(int keySize) {
- // try to reuse existing memory from a lower-level split if possible
- this.keySize = keySize;
- if(data != null) {
- if(data.length < keySize + 8) {
- data = new byte[keySize + 8]; // add 8 for left and right page
- buf = ByteBuffer.wrap(data);
- }
- }
- else {
- data = new byte[keySize + 8]; // add 8 for left and right page
- buf = ByteBuffer.wrap(data);
- }
-
- tuple.resetByOffset(buf, 0);
- }
-
- public void reset() {
- data = null;
- buf = null;
- }
-
- public ByteBuffer getBuffer() {
- return buf;
- }
-
- public IBTreeTupleReference getTuple() {
- return tuple;
- }
-
- public int getLeftPage() {
- return buf.getInt(keySize);
- }
-
- public int getRightPage() {
- return buf.getInt(keySize + 4);
- }
-
- public void setLeftPage(int leftPage) {
- buf.putInt(keySize, leftPage);
- }
-
- public void setRightPage(int rightPage) {
- buf.putInt(keySize + 4, rightPage);
- }
-
- public void setPages(int leftPage, int rightPage) {
- buf.putInt(keySize, leftPage);
- buf.putInt(keySize + 4, rightPage);
- }
-
- public SplitKey duplicate(IBTreeTupleReference copyTuple) {
- SplitKey copy = new SplitKey(copyTuple);
- copy.data = data.clone();
- copy.buf = ByteBuffer.wrap(copy.data);
- copy.tuple.setFieldCount(tuple.getFieldCount());
- copy.tuple.resetByOffset(copy.buf, 0);
- return copy;
- }
+public class SplitKey {
+ public byte[] data = null;
+ public ByteBuffer buf = null;
+ public IBTreeTupleReference tuple;
+ public int keySize = 0;
+
+ public SplitKey(IBTreeTupleReference tuple) {
+ this.tuple = tuple;
+ }
+
+ public void initData(int keySize) {
+ // try to reuse existing memory from a lower-level split if possible
+ this.keySize = keySize;
+ if (data != null) {
+ if (data.length < keySize + 8) {
+ data = new byte[keySize + 8]; // add 8 for left and right page
+ buf = ByteBuffer.wrap(data);
+ }
+ } else {
+ data = new byte[keySize + 8]; // add 8 for left and right page
+ buf = ByteBuffer.wrap(data);
+ }
+
+ tuple.resetByOffset(buf, 0);
+ }
+
+ public void reset() {
+ data = null;
+ buf = null;
+ }
+
+ public ByteBuffer getBuffer() {
+ return buf;
+ }
+
+ public IBTreeTupleReference getTuple() {
+ return tuple;
+ }
+
+ public int getLeftPage() {
+ return buf.getInt(keySize);
+ }
+
+ public int getRightPage() {
+ return buf.getInt(keySize + 4);
+ }
+
+ public void setLeftPage(int leftPage) {
+ buf.putInt(keySize, leftPage);
+ }
+
+ public void setRightPage(int rightPage) {
+ buf.putInt(keySize + 4, rightPage);
+ }
+
+ public void setPages(int leftPage, int rightPage) {
+ buf.putInt(keySize, leftPage);
+ buf.putInt(keySize + 4, rightPage);
+ }
+
+ public SplitKey duplicate(IBTreeTupleReference copyTuple) {
+ SplitKey copy = new SplitKey(copyTuple);
+ copy.data = data.clone();
+ copy.buf = ByteBuffer.wrap(copy.data);
+ copy.tuple.setFieldCount(tuple.getFieldCount());
+ copy.tuple.resetByOffset(copy.buf, 0);
+ return copy;
+ }
}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/SimpleTupleReference.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/SimpleTupleReference.java
index 284efc6..eb9fe2e 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/SimpleTupleReference.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/SimpleTupleReference.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.btree.tuples;
import java.nio.ByteBuffer;
@@ -6,74 +21,74 @@
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleReference;
public class SimpleTupleReference implements IBTreeTupleReference {
-
- protected ByteBuffer buf;
- protected int fieldStartIndex;
- protected int fieldCount;
- protected int tupleStartOff;
- protected int nullFlagsBytes;
- protected int fieldSlotsBytes;
-
- @Override
- public void resetByOffset(ByteBuffer buf, int tupleStartOff) {
- this.buf = buf;
- this.tupleStartOff = tupleStartOff;
- }
-
- @Override
- public void resetByTupleIndex(IBTreeFrame frame, int tupleIndex) {
- resetByOffset(frame.getBuffer(), frame.getTupleOffset(tupleIndex));
- }
-
- @Override
- public void setFieldCount(int fieldCount) {
- this.fieldCount = fieldCount;
- nullFlagsBytes = getNullFlagsBytes();
- fieldSlotsBytes = getFieldSlotsBytes();
- fieldStartIndex = 0;
- }
-
- @Override
- public void setFieldCount(int fieldStartIndex, int fieldCount) {
- this.fieldCount = fieldCount;
- this.fieldStartIndex = fieldStartIndex;
- }
-
- @Override
- public int getFieldCount() {
- return fieldCount;
- }
- @Override
- public byte[] getFieldData(int fIdx) {
- return buf.array();
- }
+ protected ByteBuffer buf;
+ protected int fieldStartIndex;
+ protected int fieldCount;
+ protected int tupleStartOff;
+ protected int nullFlagsBytes;
+ protected int fieldSlotsBytes;
- @Override
- public int getFieldLength(int fIdx) {
- if(fIdx == 0) {
- return buf.getShort(tupleStartOff + nullFlagsBytes);
- }
- else {
- return buf.getShort(tupleStartOff + nullFlagsBytes + fIdx * 2) - buf.getShort(tupleStartOff + nullFlagsBytes + ((fIdx-1) * 2));
- }
- }
+ @Override
+ public void resetByOffset(ByteBuffer buf, int tupleStartOff) {
+ this.buf = buf;
+ this.tupleStartOff = tupleStartOff;
+ }
- @Override
- public int getFieldStart(int fIdx) {
- if(fIdx == 0) {
- return tupleStartOff + nullFlagsBytes + fieldSlotsBytes;
- }
- else {
- return tupleStartOff + nullFlagsBytes + fieldSlotsBytes + buf.getShort(tupleStartOff + nullFlagsBytes + ((fIdx-1) * 2));
- }
- }
-
- protected int getNullFlagsBytes() {
- return (int)Math.ceil(fieldCount / 8.0);
- }
-
- protected int getFieldSlotsBytes() {
- return fieldCount * 2;
- }
+ @Override
+ public void resetByTupleIndex(IBTreeFrame frame, int tupleIndex) {
+ resetByOffset(frame.getBuffer(), frame.getTupleOffset(tupleIndex));
+ }
+
+ @Override
+ public void setFieldCount(int fieldCount) {
+ this.fieldCount = fieldCount;
+ nullFlagsBytes = getNullFlagsBytes();
+ fieldSlotsBytes = getFieldSlotsBytes();
+ fieldStartIndex = 0;
+ }
+
+ @Override
+ public void setFieldCount(int fieldStartIndex, int fieldCount) {
+ this.fieldCount = fieldCount;
+ this.fieldStartIndex = fieldStartIndex;
+ }
+
+ @Override
+ public int getFieldCount() {
+ return fieldCount;
+ }
+
+ @Override
+ public byte[] getFieldData(int fIdx) {
+ return buf.array();
+ }
+
+ @Override
+ public int getFieldLength(int fIdx) {
+ if (fIdx == 0) {
+ return buf.getShort(tupleStartOff + nullFlagsBytes);
+ } else {
+ return buf.getShort(tupleStartOff + nullFlagsBytes + fIdx * 2)
+ - buf.getShort(tupleStartOff + nullFlagsBytes + ((fIdx - 1) * 2));
+ }
+ }
+
+ @Override
+ public int getFieldStart(int fIdx) {
+ if (fIdx == 0) {
+ return tupleStartOff + nullFlagsBytes + fieldSlotsBytes;
+ } else {
+ return tupleStartOff + nullFlagsBytes + fieldSlotsBytes
+ + buf.getShort(tupleStartOff + nullFlagsBytes + ((fIdx - 1) * 2));
+ }
+ }
+
+ protected int getNullFlagsBytes() {
+ return (int) Math.ceil(fieldCount / 8.0);
+ }
+
+ protected int getFieldSlotsBytes() {
+ return fieldCount * 2;
+ }
}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/SimpleTupleWriter.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/SimpleTupleWriter.java
index 0b1ccd0..e2966ee 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/SimpleTupleWriter.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/SimpleTupleWriter.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.btree.tuples;
import java.nio.ByteBuffer;
@@ -8,85 +23,88 @@
public class SimpleTupleWriter implements IBTreeTupleWriter {
- @Override
- public int bytesRequired(ITupleReference tuple) {
- int bytes = getNullFlagsBytes(tuple) + getFieldSlotsBytes(tuple);
- for(int i = 0; i < tuple.getFieldCount(); i++) {
- bytes += tuple.getFieldLength(i);
- }
- return bytes;
- }
-
- @Override
- public int bytesRequired(ITupleReference tuple, int startField, int numFields) {
- int bytes = getNullFlagsBytes(tuple, startField, numFields) + getFieldSlotsBytes(tuple, startField, numFields);
- for(int i = startField; i < startField + numFields; i++) {
- bytes += tuple.getFieldLength(i);
- }
- return bytes;
- }
+ @Override
+ public int bytesRequired(ITupleReference tuple) {
+ int bytes = getNullFlagsBytes(tuple) + getFieldSlotsBytes(tuple);
+ for (int i = 0; i < tuple.getFieldCount(); i++) {
+ bytes += tuple.getFieldLength(i);
+ }
+ return bytes;
+ }
- @Override
- public IBTreeTupleReference createTupleReference() {
- return new SimpleTupleReference();
- }
-
- @Override
- public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf, int targetOff) {
- int runner = targetOff;
- int nullFlagsBytes = getNullFlagsBytes(tuple);
- int fieldSlotsBytes = getFieldSlotsBytes(tuple);
- for(int i = 0; i < nullFlagsBytes; i++) {
- targetBuf.put(runner++, (byte)0);
- }
- runner += fieldSlotsBytes;
-
- int fieldEndOff = 0;
- for(int i = 0; i < tuple.getFieldCount(); i++) {
- System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner, tuple.getFieldLength(i));
- fieldEndOff += tuple.getFieldLength(i);
- runner += tuple.getFieldLength(i);
- targetBuf.putShort(targetOff + nullFlagsBytes + i * 2, (short)fieldEndOff);
- }
-
- return runner - targetOff;
- }
+ @Override
+ public int bytesRequired(ITupleReference tuple, int startField, int numFields) {
+ int bytes = getNullFlagsBytes(tuple, startField, numFields) + getFieldSlotsBytes(tuple, startField, numFields);
+ for (int i = startField; i < startField + numFields; i++) {
+ bytes += tuple.getFieldLength(i);
+ }
+ return bytes;
+ }
- @Override
- public int writeTupleFields(ITupleReference tuple, int startField, int numFields, ByteBuffer targetBuf, int targetOff) {
- int runner = targetOff;
- int nullFlagsBytes = getNullFlagsBytes(tuple, startField, numFields);
- for(int i = 0; i < nullFlagsBytes; i++) {
- targetBuf.put(runner++, (byte)0);
- }
- runner += getFieldSlotsBytes(tuple, startField, numFields);
-
- int fieldEndOff = 0;
- int fieldCounter = 0;
- for(int i = startField; i < startField + numFields; i++) {
- System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner, tuple.getFieldLength(i));
- fieldEndOff += tuple.getFieldLength(i);
- runner += tuple.getFieldLength(i);
- targetBuf.putShort(targetOff + nullFlagsBytes + fieldCounter * 2, (short)fieldEndOff);
- fieldCounter++;
- }
-
- return runner - targetOff;
- }
-
- private int getNullFlagsBytes(ITupleReference tuple) {
- return (int)Math.ceil((double)tuple.getFieldCount() / 8.0);
- }
-
- private int getFieldSlotsBytes(ITupleReference tuple) {
- return tuple.getFieldCount() * 2;
- }
-
- private int getNullFlagsBytes(ITupleReference tuple, int startField, int numFields) {
- return (int)Math.ceil((double)numFields / 8.0);
- }
-
- private int getFieldSlotsBytes(ITupleReference tuple, int startField, int numFields) {
- return numFields * 2;
- }
+ @Override
+ public IBTreeTupleReference createTupleReference() {
+ return new SimpleTupleReference();
+ }
+
+ @Override
+ public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf, int targetOff) {
+ int runner = targetOff;
+ int nullFlagsBytes = getNullFlagsBytes(tuple);
+ int fieldSlotsBytes = getFieldSlotsBytes(tuple);
+ for (int i = 0; i < nullFlagsBytes; i++) {
+ targetBuf.put(runner++, (byte) 0);
+ }
+ runner += fieldSlotsBytes;
+
+ int fieldEndOff = 0;
+ for (int i = 0; i < tuple.getFieldCount(); i++) {
+ System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner, tuple
+ .getFieldLength(i));
+ fieldEndOff += tuple.getFieldLength(i);
+ runner += tuple.getFieldLength(i);
+ targetBuf.putShort(targetOff + nullFlagsBytes + i * 2, (short) fieldEndOff);
+ }
+
+ return runner - targetOff;
+ }
+
+ @Override
+ public int writeTupleFields(ITupleReference tuple, int startField, int numFields, ByteBuffer targetBuf,
+ int targetOff) {
+ int runner = targetOff;
+ int nullFlagsBytes = getNullFlagsBytes(tuple, startField, numFields);
+ for (int i = 0; i < nullFlagsBytes; i++) {
+ targetBuf.put(runner++, (byte) 0);
+ }
+ runner += getFieldSlotsBytes(tuple, startField, numFields);
+
+ int fieldEndOff = 0;
+ int fieldCounter = 0;
+ for (int i = startField; i < startField + numFields; i++) {
+ System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner, tuple
+ .getFieldLength(i));
+ fieldEndOff += tuple.getFieldLength(i);
+ runner += tuple.getFieldLength(i);
+ targetBuf.putShort(targetOff + nullFlagsBytes + fieldCounter * 2, (short) fieldEndOff);
+ fieldCounter++;
+ }
+
+ return runner - targetOff;
+ }
+
+ private int getNullFlagsBytes(ITupleReference tuple) {
+ return (int) Math.ceil((double) tuple.getFieldCount() / 8.0);
+ }
+
+ private int getFieldSlotsBytes(ITupleReference tuple) {
+ return tuple.getFieldCount() * 2;
+ }
+
+ private int getNullFlagsBytes(ITupleReference tuple, int startField, int numFields) {
+ return (int) Math.ceil((double) numFields / 8.0);
+ }
+
+ private int getFieldSlotsBytes(ITupleReference tuple, int startField, int numFields) {
+ return numFields * 2;
+ }
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/SimpleTupleWriterFactory.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/SimpleTupleWriterFactory.java
index 31c3c40..3b06153 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/SimpleTupleWriterFactory.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/SimpleTupleWriterFactory.java
@@ -1,15 +1,30 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.btree.tuples;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleWriter;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleWriterFactory;
public class SimpleTupleWriterFactory implements IBTreeTupleWriterFactory {
-
- private static final long serialVersionUID = 1L;
- @Override
- public IBTreeTupleWriter createTupleWriter() {
- return new SimpleTupleWriter();
- }
-
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public IBTreeTupleWriter createTupleWriter() {
+ return new SimpleTupleWriter();
+ }
+
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/TypeAwareTupleReference.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/TypeAwareTupleReference.java
index 89325de..b82adb8 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/TypeAwareTupleReference.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/TypeAwareTupleReference.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.btree.tuples;
import java.nio.ByteBuffer;
@@ -7,103 +22,99 @@
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleReference;
public class TypeAwareTupleReference implements IBTreeTupleReference {
- protected ByteBuffer buf;
- protected int fieldStartIndex;
- protected int fieldCount;
- protected int tupleStartOff;
- protected int nullFlagsBytes;
- protected int dataStartOff;
-
- private ITypeTrait[] typeTraits;
- private VarLenIntEncoderDecoder encDec = new VarLenIntEncoderDecoder();
- private int[] decodedFieldSlots;
-
- public TypeAwareTupleReference(ITypeTrait[] typeTraits) {
- this.typeTraits = typeTraits;
- this.fieldStartIndex = 0;
- }
-
- @Override
- public void resetByOffset(ByteBuffer buf, int tupleStartOff) {
- this.buf = buf;
- this.tupleStartOff = tupleStartOff;
-
- // decode field slots
- int field = 0;
- int cumul = 0;
- int end = fieldStartIndex + fieldCount;
- encDec.reset(buf.array(), tupleStartOff + nullFlagsBytes);
- for(int i = fieldStartIndex; i < end; i++) {
- int staticDataLen = typeTraits[i].getStaticallyKnownDataLength();
- if(staticDataLen == ITypeTrait.VARIABLE_LENGTH) {
- cumul += encDec.decode();
- decodedFieldSlots[field++] = cumul;
- }
- else {
- cumul += staticDataLen;
- decodedFieldSlots[field++] = cumul;
- }
- }
- dataStartOff = encDec.getPos();
- }
-
- @Override
- public void resetByTupleIndex(IBTreeFrame frame, int tupleIndex) {
- resetByOffset(frame.getBuffer(), frame.getTupleOffset(tupleIndex));
- }
-
- @Override
- public void setFieldCount(int fieldCount) {
- this.fieldCount = fieldCount;
- if(decodedFieldSlots == null) {
- decodedFieldSlots = new int[fieldCount];
- }
- else {
- if(fieldCount > decodedFieldSlots.length) {
- decodedFieldSlots = new int[fieldCount];
- }
- }
- nullFlagsBytes = getNullFlagsBytes();
- this.fieldStartIndex = 0;
- }
-
- @Override
- public void setFieldCount(int fieldStartIndex, int fieldCount) {
- setFieldCount(fieldCount);
- this.fieldStartIndex = fieldStartIndex;
- }
-
- @Override
- public int getFieldCount() {
- return fieldCount;
- }
+ protected ByteBuffer buf;
+ protected int fieldStartIndex;
+ protected int fieldCount;
+ protected int tupleStartOff;
+ protected int nullFlagsBytes;
+ protected int dataStartOff;
- @Override
- public byte[] getFieldData(int fIdx) {
- return buf.array();
- }
+ private ITypeTrait[] typeTraits;
+ private VarLenIntEncoderDecoder encDec = new VarLenIntEncoderDecoder();
+ private int[] decodedFieldSlots;
- @Override
- public int getFieldLength(int fIdx) {
- if(fIdx == 0) {
- return decodedFieldSlots[0];
- }
- else {
- return decodedFieldSlots[fIdx] - decodedFieldSlots[fIdx-1];
- }
- }
+ public TypeAwareTupleReference(ITypeTrait[] typeTraits) {
+ this.typeTraits = typeTraits;
+ this.fieldStartIndex = 0;
+ }
- @Override
- public int getFieldStart(int fIdx) {
- if(fIdx == 0) {
- return dataStartOff;
- }
- else {
- return dataStartOff + decodedFieldSlots[fIdx-1];
- }
- }
-
- protected int getNullFlagsBytes() {
- return (int)Math.ceil(fieldCount / 8.0);
- }
+ @Override
+ public void resetByOffset(ByteBuffer buf, int tupleStartOff) {
+ this.buf = buf;
+ this.tupleStartOff = tupleStartOff;
+
+ // decode field slots
+ int field = 0;
+ int cumul = 0;
+ int end = fieldStartIndex + fieldCount;
+ encDec.reset(buf.array(), tupleStartOff + nullFlagsBytes);
+ for (int i = fieldStartIndex; i < end; i++) {
+ int staticDataLen = typeTraits[i].getStaticallyKnownDataLength();
+ if (staticDataLen == ITypeTrait.VARIABLE_LENGTH) {
+ cumul += encDec.decode();
+ decodedFieldSlots[field++] = cumul;
+ } else {
+ cumul += staticDataLen;
+ decodedFieldSlots[field++] = cumul;
+ }
+ }
+ dataStartOff = encDec.getPos();
+ }
+
+ @Override
+ public void resetByTupleIndex(IBTreeFrame frame, int tupleIndex) {
+ resetByOffset(frame.getBuffer(), frame.getTupleOffset(tupleIndex));
+ }
+
+ @Override
+ public void setFieldCount(int fieldCount) {
+ this.fieldCount = fieldCount;
+ if (decodedFieldSlots == null) {
+ decodedFieldSlots = new int[fieldCount];
+ } else {
+ if (fieldCount > decodedFieldSlots.length) {
+ decodedFieldSlots = new int[fieldCount];
+ }
+ }
+ nullFlagsBytes = getNullFlagsBytes();
+ this.fieldStartIndex = 0;
+ }
+
+ @Override
+ public void setFieldCount(int fieldStartIndex, int fieldCount) {
+ setFieldCount(fieldCount);
+ this.fieldStartIndex = fieldStartIndex;
+ }
+
+ @Override
+ public int getFieldCount() {
+ return fieldCount;
+ }
+
+ @Override
+ public byte[] getFieldData(int fIdx) {
+ return buf.array();
+ }
+
+ @Override
+ public int getFieldLength(int fIdx) {
+ if (fIdx == 0) {
+ return decodedFieldSlots[0];
+ } else {
+ return decodedFieldSlots[fIdx] - decodedFieldSlots[fIdx - 1];
+ }
+ }
+
+ @Override
+ public int getFieldStart(int fIdx) {
+ if (fIdx == 0) {
+ return dataStartOff;
+ } else {
+ return dataStartOff + decodedFieldSlots[fIdx - 1];
+ }
+ }
+
+ protected int getNullFlagsBytes() {
+ return (int) Math.ceil(fieldCount / 8.0);
+ }
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/TypeAwareTupleWriter.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/TypeAwareTupleWriter.java
index e47bf2f..c975dae 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/TypeAwareTupleWriter.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/TypeAwareTupleWriter.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.btree.tuples;
import java.nio.ByteBuffer;
@@ -9,122 +24,125 @@
public class TypeAwareTupleWriter implements IBTreeTupleWriter {
- private ITypeTrait[] typeTraits;
- private VarLenIntEncoderDecoder encDec = new VarLenIntEncoderDecoder();
-
- public TypeAwareTupleWriter(ITypeTrait[] typeTraits) {
- this.typeTraits = typeTraits;
- }
-
- @Override
- public int bytesRequired(ITupleReference tuple) {
- int bytes = getNullFlagsBytes(tuple) + getFieldSlotsBytes(tuple);
- for(int i = 0; i < tuple.getFieldCount(); i++) {
- bytes += tuple.getFieldLength(i);
- }
- return bytes;
- }
-
- @Override
- public int bytesRequired(ITupleReference tuple, int startField, int numFields) {
- int bytes = getNullFlagsBytes(tuple, startField, numFields) + getFieldSlotsBytes(tuple, startField, numFields);
- for(int i = startField; i < startField + numFields; i++) {
- bytes += tuple.getFieldLength(i);
- }
- return bytes;
- }
+ private ITypeTrait[] typeTraits;
+ private VarLenIntEncoderDecoder encDec = new VarLenIntEncoderDecoder();
- @Override
- public IBTreeTupleReference createTupleReference() {
- return new TypeAwareTupleReference(typeTraits);
- }
-
- @Override
- public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf, int targetOff) {
- int runner = targetOff;
- int nullFlagsBytes = getNullFlagsBytes(tuple);
- // write null indicator bits
- for(int i = 0; i < nullFlagsBytes; i++) {
- targetBuf.put(runner++, (byte)0);
- }
-
- // write field slots for variable length fields
- encDec.reset(targetBuf.array(), runner);
- for(int i = 0; i < tuple.getFieldCount(); i++) {
- if(typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
- encDec.encode(tuple.getFieldLength(i));
- }
- }
- runner = encDec.getPos();
-
- // write data fields
- for(int i = 0; i < tuple.getFieldCount(); i++) {
- System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner, tuple.getFieldLength(i));
- runner += tuple.getFieldLength(i);
- }
-
- return runner - targetOff;
- }
+ public TypeAwareTupleWriter(ITypeTrait[] typeTraits) {
+ this.typeTraits = typeTraits;
+ }
- @Override
- public int writeTupleFields(ITupleReference tuple, int startField, int numFields, ByteBuffer targetBuf, int targetOff) {
- int runner = targetOff;
- int nullFlagsBytes = getNullFlagsBytes(tuple, startField, numFields);
- // write null indicator bits
- for(int i = 0; i < nullFlagsBytes; i++) {
- targetBuf.put(runner++, (byte)0);
- }
-
- // write field slots for variable length fields
- encDec.reset(targetBuf.array(), runner);
- for(int i = startField; i < startField + numFields; i++) {
- if(typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
- encDec.encode(tuple.getFieldLength(i));
- }
- }
- runner = encDec.getPos();
-
- for(int i = startField; i < startField + numFields; i++) {
- System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner, tuple.getFieldLength(i));
- runner += tuple.getFieldLength(i);
- }
-
- return runner - targetOff;
- }
-
- private int getNullFlagsBytes(ITupleReference tuple) {
- return (int)Math.ceil((double)tuple.getFieldCount() / 8.0);
- }
-
- private int getFieldSlotsBytes(ITupleReference tuple) {
- int fieldSlotBytes = 0;
- for(int i = 0; i < tuple.getFieldCount(); i++) {
- if(typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
- fieldSlotBytes += encDec.getBytesRequired(tuple.getFieldLength(i));
- }
- }
- return fieldSlotBytes;
- }
-
- private int getNullFlagsBytes(ITupleReference tuple, int startField, int numFields) {
- return (int)Math.ceil((double)numFields / 8.0);
- }
-
- private int getFieldSlotsBytes(ITupleReference tuple, int startField, int numFields) {
- int fieldSlotBytes = 0;
- for(int i = startField; i < startField + numFields; i++) {
- if(typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
- fieldSlotBytes += encDec.getBytesRequired(tuple.getFieldLength(i));
- }
- }
- return fieldSlotBytes;
- }
-
- public ITypeTrait[] getTypeTraits() {
- return typeTraits;
- }
-
- public void setTypeTraits(ITypeTrait[] typeTraits) {
- this.typeTraits = typeTraits;
- }
+ @Override
+ public int bytesRequired(ITupleReference tuple) {
+ int bytes = getNullFlagsBytes(tuple) + getFieldSlotsBytes(tuple);
+ for (int i = 0; i < tuple.getFieldCount(); i++) {
+ bytes += tuple.getFieldLength(i);
+ }
+ return bytes;
+ }
+
+ @Override
+ public int bytesRequired(ITupleReference tuple, int startField, int numFields) {
+ int bytes = getNullFlagsBytes(tuple, startField, numFields) + getFieldSlotsBytes(tuple, startField, numFields);
+ for (int i = startField; i < startField + numFields; i++) {
+ bytes += tuple.getFieldLength(i);
+ }
+ return bytes;
+ }
+
+ @Override
+ public IBTreeTupleReference createTupleReference() {
+ return new TypeAwareTupleReference(typeTraits);
+ }
+
+ @Override
+ public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf, int targetOff) {
+ int runner = targetOff;
+ int nullFlagsBytes = getNullFlagsBytes(tuple);
+ // write null indicator bits
+ for (int i = 0; i < nullFlagsBytes; i++) {
+ targetBuf.put(runner++, (byte) 0);
+ }
+
+ // write field slots for variable length fields
+ encDec.reset(targetBuf.array(), runner);
+ for (int i = 0; i < tuple.getFieldCount(); i++) {
+ if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
+ encDec.encode(tuple.getFieldLength(i));
+ }
+ }
+ runner = encDec.getPos();
+
+ // write data fields
+ for (int i = 0; i < tuple.getFieldCount(); i++) {
+ System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner, tuple
+ .getFieldLength(i));
+ runner += tuple.getFieldLength(i);
+ }
+
+ return runner - targetOff;
+ }
+
+ @Override
+ public int writeTupleFields(ITupleReference tuple, int startField, int numFields, ByteBuffer targetBuf,
+ int targetOff) {
+ int runner = targetOff;
+ int nullFlagsBytes = getNullFlagsBytes(tuple, startField, numFields);
+ // write null indicator bits
+ for (int i = 0; i < nullFlagsBytes; i++) {
+ targetBuf.put(runner++, (byte) 0);
+ }
+
+ // write field slots for variable length fields
+ encDec.reset(targetBuf.array(), runner);
+ for (int i = startField; i < startField + numFields; i++) {
+ if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
+ encDec.encode(tuple.getFieldLength(i));
+ }
+ }
+ runner = encDec.getPos();
+
+ for (int i = startField; i < startField + numFields; i++) {
+ System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner, tuple
+ .getFieldLength(i));
+ runner += tuple.getFieldLength(i);
+ }
+
+ return runner - targetOff;
+ }
+
+ private int getNullFlagsBytes(ITupleReference tuple) {
+ return (int) Math.ceil((double) tuple.getFieldCount() / 8.0);
+ }
+
+ private int getFieldSlotsBytes(ITupleReference tuple) {
+ int fieldSlotBytes = 0;
+ for (int i = 0; i < tuple.getFieldCount(); i++) {
+ if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
+ fieldSlotBytes += encDec.getBytesRequired(tuple.getFieldLength(i));
+ }
+ }
+ return fieldSlotBytes;
+ }
+
+ private int getNullFlagsBytes(ITupleReference tuple, int startField, int numFields) {
+ return (int) Math.ceil((double) numFields / 8.0);
+ }
+
+ private int getFieldSlotsBytes(ITupleReference tuple, int startField, int numFields) {
+ int fieldSlotBytes = 0;
+ for (int i = startField; i < startField + numFields; i++) {
+ if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
+ fieldSlotBytes += encDec.getBytesRequired(tuple.getFieldLength(i));
+ }
+ }
+ return fieldSlotBytes;
+ }
+
+ public ITypeTrait[] getTypeTraits() {
+ return typeTraits;
+ }
+
+ public void setTypeTraits(ITypeTrait[] typeTraits) {
+ this.typeTraits = typeTraits;
+ }
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/TypeAwareTupleWriterFactory.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/TypeAwareTupleWriterFactory.java
index 12c7c1e..bfaa120 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/TypeAwareTupleWriterFactory.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/TypeAwareTupleWriterFactory.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.btree.tuples;
import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
@@ -5,17 +20,17 @@
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleWriterFactory;
public class TypeAwareTupleWriterFactory implements IBTreeTupleWriterFactory {
-
- private static final long serialVersionUID = 1L;
- private ITypeTrait[] typeTraits;
-
- public TypeAwareTupleWriterFactory(ITypeTrait[] typeTraits) {
- this.typeTraits = typeTraits;
- }
-
- @Override
- public IBTreeTupleWriter createTupleWriter() {
- return new TypeAwareTupleWriter(typeTraits);
- }
-
+
+ private static final long serialVersionUID = 1L;
+ private ITypeTrait[] typeTraits;
+
+ public TypeAwareTupleWriterFactory(ITypeTrait[] typeTraits) {
+ this.typeTraits = typeTraits;
+ }
+
+ @Override
+ public IBTreeTupleWriter createTupleWriter() {
+ return new TypeAwareTupleWriter(typeTraits);
+ }
+
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/VarLenIntEncoderDecoder.java b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/VarLenIntEncoderDecoder.java
index b3565ba..8e40855 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/VarLenIntEncoderDecoder.java
+++ b/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/VarLenIntEncoderDecoder.java
@@ -1,83 +1,88 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.btree.tuples;
// encodes positive integers in a variable-byte format
public class VarLenIntEncoderDecoder {
- public static final int ENCODE_MASK = 0x0000007F;
- public static final byte CONTINUE_CHUNK = (byte)0x80;
- public static final byte DECODE_MASK = (byte)0x7F;
-
- private byte[] encTmp = new byte[5];
-
- private int pos;
- private byte[] bytes;
-
- public void reset(byte[] bytes, int pos) {
- this.bytes = bytes;
- this.pos = pos;
- }
-
- public int encode(int val) {
- int origPos = 0;
- int tmpPos = 0;
- while(val > ENCODE_MASK) {
- encTmp[tmpPos++] = (byte)(val & ENCODE_MASK);
- val = val >>> 7;
- }
- encTmp[tmpPos++] = (byte)(val);
-
- // reverse order to optimize for decoding speed
- for(int i = 0; i < tmpPos-1; i++) {
- bytes[pos++] = (byte)(encTmp[tmpPos-1-i] | CONTINUE_CHUNK);
- }
- bytes[pos++] = encTmp[0];
-
- return pos - origPos;
- }
-
- public int decode() {
- int sum = 0;
- while( (bytes[pos] & CONTINUE_CHUNK) == CONTINUE_CHUNK) {
- sum = (sum + (bytes[pos] & DECODE_MASK)) << 7;
- pos++;
- }
- sum += bytes[pos++];
- return sum;
- }
-
- // calculate the number of bytes needed for encoding
- public int getBytesRequired(int val) {
- int byteCount = 0;
- while(val > ENCODE_MASK) {
- val = val >>> 7;
- byteCount++;
- }
- return byteCount + 1;
- }
-
- public int getPos() {
- return pos;
- }
-
- // fast encoding, slow decoding version
- /*
- public void encode(int val) {
- while(val > ENCODE_MASK) {
- bytes[pos++] = (byte)(((byte)(val & ENCODE_MASK)) | CONTINUE_CHUNK);
- val = val >>> 7;
- }
- bytes[pos++] = (byte)(val);
- }
-
- public int decode() {
- int sum = 0;
- int shift = 0;
- while( (bytes[pos] & CONTINUE_CHUNK) == CONTINUE_CHUNK) {
- sum = (sum + (bytes[pos] & DECODE_MASK)) << 7 * shift++;
- pos++;
- }
- sum += bytes[pos++] << 7 * shift;
- return sum;
- }
- */
+ public static final int ENCODE_MASK = 0x0000007F;
+ public static final byte CONTINUE_CHUNK = (byte) 0x80;
+ public static final byte DECODE_MASK = (byte) 0x7F;
+
+ private byte[] encTmp = new byte[5];
+
+ private int pos;
+ private byte[] bytes;
+
+ public void reset(byte[] bytes, int pos) {
+ this.bytes = bytes;
+ this.pos = pos;
+ }
+
+ public int encode(int val) {
+ int origPos = 0;
+ int tmpPos = 0;
+ while (val > ENCODE_MASK) {
+ encTmp[tmpPos++] = (byte) (val & ENCODE_MASK);
+ val = val >>> 7;
+ }
+ encTmp[tmpPos++] = (byte) (val);
+
+ // reverse order to optimize for decoding speed
+ for (int i = 0; i < tmpPos - 1; i++) {
+ bytes[pos++] = (byte) (encTmp[tmpPos - 1 - i] | CONTINUE_CHUNK);
+ }
+ bytes[pos++] = encTmp[0];
+
+ return pos - origPos;
+ }
+
+ public int decode() {
+ int sum = 0;
+ while ((bytes[pos] & CONTINUE_CHUNK) == CONTINUE_CHUNK) {
+ sum = (sum + (bytes[pos] & DECODE_MASK)) << 7;
+ pos++;
+ }
+ sum += bytes[pos++];
+ return sum;
+ }
+
+ // calculate the number of bytes needed for encoding
+ public int getBytesRequired(int val) {
+ int byteCount = 0;
+ while (val > ENCODE_MASK) {
+ val = val >>> 7;
+ byteCount++;
+ }
+ return byteCount + 1;
+ }
+
+ public int getPos() {
+ return pos;
+ }
+
+ // fast encoding, slow decoding version
+ /*
+ * public void encode(int val) { while(val > ENCODE_MASK) { bytes[pos++] =
+ * (byte)(((byte)(val & ENCODE_MASK)) | CONTINUE_CHUNK); val = val >>> 7; }
+ * bytes[pos++] = (byte)(val); }
+ *
+ * public int decode() { int sum = 0; int shift = 0; while( (bytes[pos] &
+ * CONTINUE_CHUNK) == CONTINUE_CHUNK) { sum = (sum + (bytes[pos] &
+ * DECODE_MASK)) << 7 * shift++; pos++; } sum += bytes[pos++] << 7 * shift;
+ * return sum; }
+ */
}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-btree/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeFieldPrefixNSMTest.java b/hyracks/hyracks-storage-am-btree/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeFieldPrefixNSMTest.java
index 0730254..ff8160c 100644
--- a/hyracks/hyracks-storage-am-btree/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeFieldPrefixNSMTest.java
+++ b/hyracks/hyracks-storage-am-btree/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeFieldPrefixNSMTest.java
@@ -16,8 +16,6 @@
package edu.uci.ics.hyracks.storage.am.btree;
import java.io.DataOutput;
-import java.io.File;
-import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.util.Random;
@@ -40,6 +38,7 @@
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.api.DummySMI;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleWriter;
import edu.uci.ics.hyracks.storage.am.btree.api.IPrefixSlotManager;
import edu.uci.ics.hyracks.storage.am.btree.frames.FieldPrefixNSMLeafFrame;
@@ -47,36 +46,29 @@
import edu.uci.ics.hyracks.storage.am.btree.impls.FieldPrefixSlotManager;
import edu.uci.ics.hyracks.storage.am.btree.impls.MultiComparator;
import edu.uci.ics.hyracks.storage.am.btree.tuples.TypeAwareTupleWriter;
-import edu.uci.ics.hyracks.storage.common.buffercache.BufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ClockPageReplacementStrategy;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-import edu.uci.ics.hyracks.storage.common.buffercache.IPageReplacementStrategy;
-import edu.uci.ics.hyracks.storage.common.file.FileInfo;
-import edu.uci.ics.hyracks.storage.common.file.FileManager;
+import edu.uci.ics.hyracks.storage.common.file.FileHandle;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
public class BTreeFieldPrefixNSMTest {
-
- //private static final int PAGE_SIZE = 8192;
- //private static final int PAGE_SIZE = 8192;
- private static final int PAGE_SIZE = 32768; // 32K
- //private static final int PAGE_SIZE = 65536; // 64K
- //private static final int PAGE_SIZE = 131072; // 128K
+
+ private static final int PAGE_SIZE = 32768; // 32K
private static final int NUM_PAGES = 40;
private static final int HYRACKS_FRAME_SIZE = 128;
-
+
private String tmpDir = System.getProperty("java.io.tmpdir");
-
+
// to help with the logger madness
private void print(String str) {
- System.out.print(str);
-
-// if(GlobalConfig.ASTERIX_LOGGER.isLoggable(Level.FINEST)) {
-// GlobalConfig.ASTERIX_LOGGER.finest(str);
-// }
- }
-
+ System.out.print(str);
+
+ // if(GlobalConfig.ASTERIX_LOGGER.isLoggable(Level.FINEST)) {
+ // GlobalConfig.ASTERIX_LOGGER.finest(str);
+ // }
+ }
+
public class BufferAllocator implements ICacheMemoryAllocator {
@Override
public ByteBuffer[] allocate(int pageSize, int numPages) {
@@ -87,59 +79,58 @@
return buffers;
}
}
-
+
private ITupleReference createTuple(int f0, int f1, int f2, boolean print) throws HyracksDataException {
- if(print) System.out.println("CREATING: " + f0 + " " + f1 + " " + f2);
-
- IHyracksContext ctx = new RootHyracksContext(HYRACKS_FRAME_SIZE);
+ if (print)
+ System.out.println("CREATING: " + f0 + " " + f1 + " " + f2);
+
+ IHyracksContext ctx = new RootHyracksContext(HYRACKS_FRAME_SIZE);
ByteBuffer buf = ctx.getResourceManager().allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx);
- ArrayTupleBuilder tb = new ArrayTupleBuilder(3);
- DataOutput dos = tb.getDataOutput();
-
- ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
- RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
- IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx, recDesc);
- accessor.reset(buf);
- FrameTupleReference tuple = new FrameTupleReference();
-
- tb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(f2, dos);
- tb.addFieldEndOffset();
-
- appender.reset(buf, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
- tuple.reset(accessor, 0);
-
- return tuple;
+ FrameTupleAppender appender = new FrameTupleAppender(ctx);
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(3);
+ DataOutput dos = tb.getDataOutput();
+
+ ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx, recDesc);
+ accessor.reset(buf);
+ FrameTupleReference tuple = new FrameTupleReference();
+
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f2, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(buf, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ return tuple;
}
-
+
@Test
public void test01() throws Exception {
-
- FileManager fileManager = new FileManager();
- ICacheMemoryAllocator allocator = new BufferAllocator();
- IPageReplacementStrategy prs = new ClockPageReplacementStrategy();
- IBufferCache bufferCache = new BufferCache(allocator, prs, fileManager, PAGE_SIZE, NUM_PAGES);
-
- File f = new File(tmpDir + "/" + "btreetest.bin");
- RandomAccessFile raf = new RandomAccessFile(f, "rw");
- int fileId = 0;
- FileInfo fi = new FileInfo(fileId, raf);
- fileManager.registerFile(fi);
-
+
+ DummySMI smi = new DummySMI(PAGE_SIZE, NUM_PAGES);
+ IBufferCache bufferCache = smi.getBufferCache();
+ IFileMapProvider fmp = smi.getFileMapProvider();
+ String fileName = tmpDir + "/" + "btreetest.bin";
+ bufferCache.createFile(fileName);
+ int fileId = fmp.lookupFileId(fileName);
+ bufferCache.openFile(fileId);
+
// declare fields
int fieldCount = 3;
ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
typeTraits[0] = new TypeTrait(4);
typeTraits[1] = new TypeTrait(4);
typeTraits[2] = new TypeTrait(4);
-
+
// declare keys
int keyFieldCount = 3;
IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
@@ -147,106 +138,106 @@
cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
cmps[2] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
MultiComparator cmp = new MultiComparator(typeTraits, cmps);
-
+
// just for printing
- ISerializerDeserializer[] sers = { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
-
+ ISerializerDeserializer[] sers = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+
Random rnd = new Random();
rnd.setSeed(50);
-
- ICachedPage page = bufferCache.pin(FileInfo.getDiskPageId(fileId, 0), false);
+
+ ICachedPage page = bufferCache.pin(FileHandle.getDiskPageId(fileId, 0), false);
try {
-
+
IPrefixSlotManager slotManager = new FieldPrefixSlotManager();
IBTreeTupleWriter tupleWriter = new TypeAwareTupleWriter(typeTraits);
- FieldPrefixNSMLeafFrame frame = new FieldPrefixNSMLeafFrame(tupleWriter);
- frame.setPage(page);
- frame.initBuffer((byte)0);
- slotManager.setFrame(frame);
+ FieldPrefixNSMLeafFrame frame = new FieldPrefixNSMLeafFrame(tupleWriter);
+ frame.setPage(page);
+ frame.initBuffer((byte) 0);
+ slotManager.setFrame(frame);
frame.setPrefixTupleCount(0);
-
+
String before = new String();
String after = new String();
-
+
int compactFreq = 5;
- int compressFreq = 5;
- int smallMax = 10;
- int numRecords = 1000;
-
- int[][] savedFields = new int[numRecords][3];
-
- // insert records with random calls to compact and compress
- for(int i = 0; i < numRecords; i++) {
-
- if((i+1) % 100 == 0) print("INSERTING " + (i+1) + " / " + numRecords + "\n");
-
- int a = rnd.nextInt() % smallMax;
- int b = rnd.nextInt() % smallMax;
- int c = i;
-
- ITupleReference tuple = createTuple(a, b, c, false);
- try {
- frame.insert(tuple, cmp);
- }
- catch (BTreeException e) {
- e.printStackTrace();
- }
- catch (Exception e) {
- e.printStackTrace();
- }
-
- savedFields[i][0] = a;
- savedFields[i][1] = b;
- savedFields[i][2] = c;
-
- if(rnd.nextInt() % compactFreq == 0) {
- before = frame.printKeys(cmp, sers);
- frame.compact(cmp);
- after = frame.printKeys(cmp, sers);
- Assert.assertEquals(before, after);
- }
-
- if(rnd.nextInt() % compressFreq == 0) {
- before = frame.printKeys(cmp, sers);
- frame.compress(cmp);
- after = frame.printKeys(cmp, sers);
- Assert.assertEquals(before, after);
- }
-
- }
-
- // delete records with random calls to compact and compress
- for(int i = 0; i < numRecords; i++) {
-
- if((i+1) % 100 == 0) print("DELETING " + (i+1) + " / " + numRecords + "\n");
-
- ITupleReference tuple = createTuple(savedFields[i][0], savedFields[i][1], savedFields[i][2], false);
- try {
- frame.delete(tuple, cmp, true);
- }
- catch (Exception e) {
- }
-
- if(rnd.nextInt() % compactFreq == 0) {
- before = frame.printKeys(cmp, sers);
- frame.compact(cmp);
- after = frame.printKeys(cmp, sers);
- Assert.assertEquals(before, after);
- }
-
- if(rnd.nextInt() % compressFreq == 0) {
- before = frame.printKeys(cmp, sers);
- frame.compress(cmp);
- after = frame.printKeys(cmp, sers);
- Assert.assertEquals(before, after);
- }
- }
-
- } finally {
+ int compressFreq = 5;
+ int smallMax = 10;
+ int numRecords = 1000;
+
+ int[][] savedFields = new int[numRecords][3];
+
+ // insert records with random calls to compact and compress
+ for (int i = 0; i < numRecords; i++) {
+
+ if ((i + 1) % 100 == 0)
+ print("INSERTING " + (i + 1) + " / " + numRecords + "\n");
+
+ int a = rnd.nextInt() % smallMax;
+ int b = rnd.nextInt() % smallMax;
+ int c = i;
+
+ ITupleReference tuple = createTuple(a, b, c, false);
+ try {
+ frame.insert(tuple, cmp);
+ } catch (BTreeException e) {
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ savedFields[i][0] = a;
+ savedFields[i][1] = b;
+ savedFields[i][2] = c;
+
+ if (rnd.nextInt() % compactFreq == 0) {
+ before = frame.printKeys(cmp, sers);
+ frame.compact(cmp);
+ after = frame.printKeys(cmp, sers);
+ Assert.assertEquals(before, after);
+ }
+
+ if (rnd.nextInt() % compressFreq == 0) {
+ before = frame.printKeys(cmp, sers);
+ frame.compress(cmp);
+ after = frame.printKeys(cmp, sers);
+ Assert.assertEquals(before, after);
+ }
+
+ }
+
+ // delete records with random calls to compact and compress
+ for (int i = 0; i < numRecords; i++) {
+
+ if ((i + 1) % 100 == 0)
+ print("DELETING " + (i + 1) + " / " + numRecords + "\n");
+
+ ITupleReference tuple = createTuple(savedFields[i][0], savedFields[i][1], savedFields[i][2], false);
+ try {
+ frame.delete(tuple, cmp, true);
+ } catch (Exception e) {
+ }
+
+ if (rnd.nextInt() % compactFreq == 0) {
+ before = frame.printKeys(cmp, sers);
+ frame.compact(cmp);
+ after = frame.printKeys(cmp, sers);
+ Assert.assertEquals(before, after);
+ }
+
+ if (rnd.nextInt() % compressFreq == 0) {
+ before = frame.printKeys(cmp, sers);
+ frame.compress(cmp);
+ after = frame.printKeys(cmp, sers);
+ Assert.assertEquals(before, after);
+ }
+ }
+
+ } finally {
bufferCache.unpin(page);
- }
-
+ }
+
+ bufferCache.closeFile(fileId);
bufferCache.close();
- fileManager.close();
}
}
diff --git a/hyracks/hyracks-storage-am-btree/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTest.java b/hyracks/hyracks-storage-am-btree/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTest.java
index 0f083d6..74dabe0 100644
--- a/hyracks/hyracks-storage-am-btree/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTest.java
+++ b/hyracks/hyracks-storage-am-btree/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTest.java
@@ -16,8 +16,6 @@
package edu.uci.ics.hyracks.storage.am.btree;
import java.io.DataOutput;
-import java.io.File;
-import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.util.Random;
@@ -40,6 +38,7 @@
import edu.uci.ics.hyracks.dataflow.common.data.comparators.UTF8StringBinaryComparatorFactory;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.api.DummySMI;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeCursor;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
@@ -60,35 +59,28 @@
import edu.uci.ics.hyracks.storage.am.btree.impls.RangeSearchCursor;
import edu.uci.ics.hyracks.storage.am.btree.tuples.SimpleTupleWriterFactory;
import edu.uci.ics.hyracks.storage.am.btree.tuples.TypeAwareTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.common.buffercache.BufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ClockPageReplacementStrategy;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
-import edu.uci.ics.hyracks.storage.common.buffercache.IPageReplacementStrategy;
-import edu.uci.ics.hyracks.storage.common.file.FileInfo;
-import edu.uci.ics.hyracks.storage.common.file.FileManager;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
@SuppressWarnings("unchecked")
+public class BTreeTest {
-public class BTreeTest {
-
- //private static final int PAGE_SIZE = 128;
- //private static final int PAGE_SIZE = 8192;
- private static final int PAGE_SIZE = 256;
+ private static final int PAGE_SIZE = 256;
private static final int NUM_PAGES = 10;
private static final int HYRACKS_FRAME_SIZE = 128;
-
+
private String tmpDir = System.getProperty("java.io.tmpdir");
-
+
// to help with the logger madness
private void print(String str) {
- System.out.print(str);
-
-// if(GlobalConfig.ASTERIX_LOGGER.isLoggable(Level.FINEST)) {
-// GlobalConfig.ASTERIX_LOGGER.finest(str);
-// }
- }
-
+ System.out.print(str);
+
+ // if(GlobalConfig.ASTERIX_LOGGER.isLoggable(Level.FINEST)) {
+ // GlobalConfig.ASTERIX_LOGGER.finest(str);
+ // }
+ }
+
public class BufferAllocator implements ICacheMemoryAllocator {
@Override
public ByteBuffer[] allocate(int pageSize, int numPages) {
@@ -99,269 +91,48 @@
return buffers;
}
}
-
+
// FIXED-LENGTH KEY TEST
- // create a B-tree with one fixed-length "key" field and one fixed-length "value" field
+ // create a B-tree with one fixed-length "key" field and one fixed-length
+ // "value" field
// fill B-tree with random values using insertions (not bulk load)
// perform ordered scan and range search
@Test
public void test01() throws Exception {
-
- print("FIXED-LENGTH KEY TEST\n");
-
- FileManager fileManager = new FileManager();
- ICacheMemoryAllocator allocator = new BufferAllocator();
- IPageReplacementStrategy prs = new ClockPageReplacementStrategy();
- IBufferCache bufferCache = new BufferCache(allocator, prs, fileManager, PAGE_SIZE, NUM_PAGES);
-
- File f = new File(tmpDir + "/" + "btreetest.bin");
- RandomAccessFile raf = new RandomAccessFile(f, "rw");
- int fileId = 0;
- FileInfo fi = new FileInfo(fileId, raf);
- fileManager.registerFile(fi);
-
+
+ print("FIXED-LENGTH KEY TEST\n");
+
+ DummySMI smi = new DummySMI(PAGE_SIZE, NUM_PAGES);
+ IBufferCache bufferCache = smi.getBufferCache();
+ IFileMapProvider fmp = smi.getFileMapProvider();
+ String fileName = tmpDir + "/" + "btreetest.bin";
+ bufferCache.createFile(fileName);
+ int fileId = fmp.lookupFileId(fileName);
+ bufferCache.openFile(fileId);
+
// declare fields
int fieldCount = 2;
ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
typeTraits[0] = new TypeTrait(4);
typeTraits[1] = new TypeTrait(4);
-
+
// declare keys
int keyFieldCount = 1;
IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-
+
MultiComparator cmp = new MultiComparator(typeTraits, cmps);
-
+
TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
- //SimpleTupleWriterFactory tupleWriterFactory = new SimpleTupleWriterFactory();
+ // SimpleTupleWriterFactory tupleWriterFactory = new
+ // SimpleTupleWriterFactory();
IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
IBTreeMetaDataFrameFactory metaFrameFactory = new MetaDataFrameFactory();
-
+
IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
IBTreeInteriorFrame interiorFrame = interiorFrameFactory.getFrame();
- IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
-
- BTree btree = new BTree(bufferCache, interiorFrameFactory, leafFrameFactory, cmp);
- btree.create(fileId, leafFrame, metaFrame);
- btree.open(fileId);
-
- Random rnd = new Random();
- rnd.setSeed(50);
-
- long start = System.currentTimeMillis();
-
- print("INSERTING INTO TREE\n");
-
- IHyracksContext ctx = new RootHyracksContext(HYRACKS_FRAME_SIZE);
- ByteBuffer frame = ctx.getResourceManager().allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx);
- ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
- DataOutput dos = tb.getDataOutput();
-
- ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE};
- RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
- IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx, recDesc);
- accessor.reset(frame);
- FrameTupleReference tuple = new FrameTupleReference();
-
- BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT, leafFrame, interiorFrame, metaFrame);
-
- // 10000
- for (int i = 0; i < 10000; i++) {
-
- int f0 = rnd.nextInt() % 10000;
- int f1 = 5;
-
- tb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
- tb.addFieldEndOffset();
-
- appender.reset(frame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
- tuple.reset(accessor, 0);
-
- //System.out.println(tuple.getFieldCount() + " " + tuple.getFieldLength(0) + " " + tuple.getFieldLength(1));
-
- if (i % 1000 == 0) {
- long end = System.currentTimeMillis();
- print("INSERTING " + i + " : " + f0 + " " + f1 + " " + (end - start) + "\n");
- }
-
- try {
- btree.insert(tuple, insertOpCtx);
- } catch (BTreeException e) {
- } catch (Exception e) {
- e.printStackTrace();
- }
-
- //btree.printTree(leafFrame, interiorFrame);
- //System.out.println();
- }
- //btree.printTree(leafFrame, interiorFrame);
- //System.out.println();
-
- int maxPage = btree.getMaxPage(metaFrame);
- System.out.println("MAXPAGE: " + maxPage);
-
- String stats = btree.printStats();
- print(stats);
-
- long end = System.currentTimeMillis();
- long duration = end - start;
- print("DURATION: " + duration + "\n");
-
- // ordered scan
-
- print("ORDERED SCAN:\n");
- IBTreeCursor scanCursor = new RangeSearchCursor(leafFrame);
- RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
- BTreeOpContext searchOpCtx = btree.createOpContext(BTreeOp.BTO_SEARCH, leafFrame, interiorFrame, null);
- btree.search(scanCursor, nullPred, searchOpCtx);
- try {
- while (scanCursor.hasNext()) {
- scanCursor.next();
- ITupleReference frameTuple = scanCursor.getTuple();
- String rec = cmp.printTuple(frameTuple, recDescSers);
- print(rec + "\n");
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- scanCursor.close();
- }
-
-
- // disk-order scan
- print("DISK-ORDER SCAN:\n");
- DiskOrderScanCursor diskOrderCursor = new DiskOrderScanCursor(leafFrame);
- btree.diskOrderScan(diskOrderCursor, leafFrame, metaFrame);
- try {
- while (diskOrderCursor.hasNext()) {
- diskOrderCursor.next();
- ITupleReference frameTuple = diskOrderCursor.getTuple();
- String rec = cmp.printTuple(frameTuple, recDescSers);
- print(rec + "\n");
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- diskOrderCursor.close();
- }
-
-
- // range search in [-1000, 1000]
- print("RANGE SEARCH:\n");
-
- IBTreeCursor rangeCursor = new RangeSearchCursor(leafFrame);
-
- // build low and high keys
- ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
- DataOutput kdos = ktb.getDataOutput();
-
- ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE };
- RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
- IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx, keyDesc);
- keyAccessor.reset(frame);
-
- appender.reset(frame, true);
-
- // build and append low key
- ktb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(-1000, kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
-
- // build and append high key
- ktb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(1000, kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
-
- // create tuplereferences for search keys
- FrameTupleReference lowKey = new FrameTupleReference();
- lowKey.reset(keyAccessor, 0);
-
- FrameTupleReference highKey = new FrameTupleReference();
- highKey.reset(keyAccessor, 1);
-
-
- IBinaryComparator[] searchCmps = new IBinaryComparator[1];
- searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
- MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
-
- RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
- btree.search(rangeCursor, rangePred, searchOpCtx);
-
- try {
- while (rangeCursor.hasNext()) {
- rangeCursor.next();
- ITupleReference frameTuple = rangeCursor.getTuple();
- String rec = cmp.printTuple(frameTuple, recDescSers);
- print(rec + "\n");
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- rangeCursor.close();
- }
-
- btree.close();
-
- bufferCache.close();
- fileManager.close();
- print("\n");
- }
-
-
- // COMPOSITE KEY TEST (NON-UNIQUE B-TREE)
- // create a B-tree with one two fixed-length "key" fields and one fixed-length "value" field
- // fill B-tree with random values using insertions (not bulk load)
- // perform ordered scan and range search
- @Test
- public void test02() throws Exception {
-
- print("COMPOSITE KEY TEST\n");
-
- FileManager fileManager = new FileManager();
- ICacheMemoryAllocator allocator = new BufferAllocator();
- IPageReplacementStrategy prs = new ClockPageReplacementStrategy();
- IBufferCache bufferCache = new BufferCache(allocator, prs, fileManager, PAGE_SIZE, NUM_PAGES);
-
- File f = new File(tmpDir + "/" + "btreetest.bin");
- RandomAccessFile raf = new RandomAccessFile(f, "rw");
- int fileId = 0;
- FileInfo fi = new FileInfo(fileId, raf);
- fileManager.registerFile(fi);
-
- // declare fields
- int fieldCount = 3;
- ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
- typeTraits[0] = new TypeTrait(4);
- typeTraits[1] = new TypeTrait(4);
- typeTraits[2] = new TypeTrait(4);
-
- // declare keys
- int keyFieldCount = 2;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
- cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-
- MultiComparator cmp = new MultiComparator(typeTraits, cmps);
-
- TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
- //SimpleTupleWriterFactory tupleWriterFactory = new SimpleTupleWriterFactory();
- IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
- IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
- IBTreeMetaDataFrameFactory metaFrameFactory = new MetaDataFrameFactory();
-
- IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
- IBTreeInteriorFrame interiorFrame = interiorFrameFactory.getFrame();
- IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
+ IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
BTree btree = new BTree(bufferCache, interiorFrameFactory, leafFrameFactory, cmp);
btree.create(fileId, leafFrame, metaFrame);
@@ -371,68 +142,285 @@
rnd.setSeed(50);
long start = System.currentTimeMillis();
-
+
print("INSERTING INTO TREE\n");
-
- IHyracksContext ctx = new RootHyracksContext(HYRACKS_FRAME_SIZE);
+
+ IHyracksContext ctx = new RootHyracksContext(HYRACKS_FRAME_SIZE);
ByteBuffer frame = ctx.getResourceManager().allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx);
- ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
- DataOutput dos = tb.getDataOutput();
-
- ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE};
- RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
- IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx, recDesc);
- accessor.reset(frame);
- FrameTupleReference tuple = new FrameTupleReference();
-
- BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT, leafFrame, interiorFrame, metaFrame);
-
- for (int i = 0; i < 10000; i++) {
- int f0 = rnd.nextInt() % 2000;
- int f1 = rnd.nextInt() % 1000;
- int f2 = 5;
-
- tb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(f2, dos);
- tb.addFieldEndOffset();
-
- appender.reset(frame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
- tuple.reset(accessor, 0);
-
+ FrameTupleAppender appender = new FrameTupleAppender(ctx);
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+ DataOutput dos = tb.getDataOutput();
+
+ ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx, recDesc);
+ accessor.reset(frame);
+ FrameTupleReference tuple = new FrameTupleReference();
+
+ BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT, leafFrame, interiorFrame, metaFrame);
+
+ // 10000
+ for (int i = 0; i < 10000; i++) {
+
+ int f0 = rnd.nextInt() % 10000;
+ int f1 = 5;
+
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ // System.out.println(tuple.getFieldCount() + " " +
+ // tuple.getFieldLength(0) + " " + tuple.getFieldLength(1));
+
if (i % 1000 == 0) {
- print("INSERTING " + i + " : " + f0 + " " + f1 + "\n");
+ long end = System.currentTimeMillis();
+ print("INSERTING " + i + " : " + f0 + " " + f1 + " " + (end - start) + "\n");
}
-
+
+ try {
+ btree.insert(tuple, insertOpCtx);
+ } catch (BTreeException e) {
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ // btree.printTree(leafFrame, interiorFrame);
+ // System.out.println();
+ }
+ // btree.printTree(leafFrame, interiorFrame);
+ // System.out.println();
+
+ int maxPage = btree.getMaxPage(metaFrame);
+ System.out.println("MAXPAGE: " + maxPage);
+
+ String stats = btree.printStats();
+ print(stats);
+
+ long end = System.currentTimeMillis();
+ long duration = end - start;
+ print("DURATION: " + duration + "\n");
+
+ // ordered scan
+
+ print("ORDERED SCAN:\n");
+ IBTreeCursor scanCursor = new RangeSearchCursor(leafFrame);
+ RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+ BTreeOpContext searchOpCtx = btree.createOpContext(BTreeOp.BTO_SEARCH, leafFrame, interiorFrame, null);
+ btree.search(scanCursor, nullPred, searchOpCtx);
+ try {
+ while (scanCursor.hasNext()) {
+ scanCursor.next();
+ ITupleReference frameTuple = scanCursor.getTuple();
+ String rec = cmp.printTuple(frameTuple, recDescSers);
+ print(rec + "\n");
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ scanCursor.close();
+ }
+
+ // disk-order scan
+ print("DISK-ORDER SCAN:\n");
+ DiskOrderScanCursor diskOrderCursor = new DiskOrderScanCursor(leafFrame);
+ btree.diskOrderScan(diskOrderCursor, leafFrame, metaFrame);
+ try {
+ while (diskOrderCursor.hasNext()) {
+ diskOrderCursor.next();
+ ITupleReference frameTuple = diskOrderCursor.getTuple();
+ String rec = cmp.printTuple(frameTuple, recDescSers);
+ print(rec + "\n");
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ diskOrderCursor.close();
+ }
+
+ // range search in [-1000, 1000]
+ print("RANGE SEARCH:\n");
+
+ IBTreeCursor rangeCursor = new RangeSearchCursor(leafFrame);
+
+ // build low and high keys
+ ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
+ DataOutput kdos = ktb.getDataOutput();
+
+ ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
+ IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx, keyDesc);
+ keyAccessor.reset(frame);
+
+ appender.reset(frame, true);
+
+ // build and append low key
+ ktb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(-1000, kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
+
+ // build and append high key
+ ktb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(1000, kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
+
+ // create tuplereferences for search keys
+ FrameTupleReference lowKey = new FrameTupleReference();
+ lowKey.reset(keyAccessor, 0);
+
+ FrameTupleReference highKey = new FrameTupleReference();
+ highKey.reset(keyAccessor, 1);
+
+ IBinaryComparator[] searchCmps = new IBinaryComparator[1];
+ searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
+
+ RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
+ btree.search(rangeCursor, rangePred, searchOpCtx);
+
+ try {
+ while (rangeCursor.hasNext()) {
+ rangeCursor.next();
+ ITupleReference frameTuple = rangeCursor.getTuple();
+ String rec = cmp.printTuple(frameTuple, recDescSers);
+ print(rec + "\n");
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ rangeCursor.close();
+ }
+
+ btree.close();
+ bufferCache.closeFile(fileId);
+ bufferCache.close();
+
+ print("\n");
+ }
+
+ // COMPOSITE KEY TEST (NON-UNIQUE B-TREE)
+ // create a B-tree with one two fixed-length "key" fields and one
+ // fixed-length "value" field
+ // fill B-tree with random values using insertions (not bulk load)
+ // perform ordered scan and range search
+ @Test
+ public void test02() throws Exception {
+
+ print("COMPOSITE KEY TEST\n");
+
+ DummySMI smi = new DummySMI(PAGE_SIZE, NUM_PAGES);
+ IBufferCache bufferCache = smi.getBufferCache();
+ IFileMapProvider fmp = smi.getFileMapProvider();
+ String fileName = tmpDir + "/" + "btreetest.bin";
+ bufferCache.createFile(fileName);
+ int fileId = fmp.lookupFileId(fileName);
+ bufferCache.openFile(fileId);
+
+ // declare fields
+ int fieldCount = 3;
+ ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ typeTraits[0] = new TypeTrait(4);
+ typeTraits[1] = new TypeTrait(4);
+ typeTraits[2] = new TypeTrait(4);
+
+ // declare keys
+ int keyFieldCount = 2;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+
+ MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+
+ TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+ // SimpleTupleWriterFactory tupleWriterFactory = new
+ // SimpleTupleWriterFactory();
+ IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
+ IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
+ IBTreeMetaDataFrameFactory metaFrameFactory = new MetaDataFrameFactory();
+
+ IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
+ IBTreeInteriorFrame interiorFrame = interiorFrameFactory.getFrame();
+ IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
+
+ BTree btree = new BTree(bufferCache, interiorFrameFactory, leafFrameFactory, cmp);
+ btree.create(fileId, leafFrame, metaFrame);
+ btree.open(fileId);
+
+ Random rnd = new Random();
+ rnd.setSeed(50);
+
+ long start = System.currentTimeMillis();
+
+ print("INSERTING INTO TREE\n");
+
+ IHyracksContext ctx = new RootHyracksContext(HYRACKS_FRAME_SIZE);
+ ByteBuffer frame = ctx.getResourceManager().allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx);
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+ DataOutput dos = tb.getDataOutput();
+
+ ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx, recDesc);
+ accessor.reset(frame);
+ FrameTupleReference tuple = new FrameTupleReference();
+
+ BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT, leafFrame, interiorFrame, metaFrame);
+
+ for (int i = 0; i < 10000; i++) {
+ int f0 = rnd.nextInt() % 2000;
+ int f1 = rnd.nextInt() % 1000;
+ int f2 = 5;
+
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f2, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ if (i % 1000 == 0) {
+ print("INSERTING " + i + " : " + f0 + " " + f1 + "\n");
+ }
+
try {
btree.insert(tuple, insertOpCtx);
} catch (Exception e) {
}
}
- //btree.printTree(leafFrame, interiorFrame);
-
+ // btree.printTree(leafFrame, interiorFrame);
+
long end = System.currentTimeMillis();
long duration = end - start;
- print("DURATION: " + duration + "\n");
-
+ print("DURATION: " + duration + "\n");
+
// try a simple index scan
- print("ORDERED SCAN:\n");
+ print("ORDERED SCAN:\n");
IBTreeCursor scanCursor = new RangeSearchCursor(leafFrame);
RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
BTreeOpContext searchOpCtx = btree.createOpContext(BTreeOp.BTO_SEARCH, leafFrame, interiorFrame, null);
btree.search(scanCursor, nullPred, searchOpCtx);
-
+
try {
while (scanCursor.hasNext()) {
scanCursor.next();
- ITupleReference frameTuple = scanCursor.getTuple();
+ ITupleReference frameTuple = scanCursor.getTuple();
String rec = cmp.printTuple(frameTuple, recDescSers);
print(rec + "\n");
}
@@ -443,559 +431,56 @@
}
// range search in [(-3),(3)]
- print("RANGE SEARCH:\n");
- IBTreeCursor rangeCursor = new RangeSearchCursor(leafFrame);
-
-
- // build low and high keys
- ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
- DataOutput kdos = ktb.getDataOutput();
-
- ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE };
- RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
- IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx, keyDesc);
- keyAccessor.reset(frame);
-
- appender.reset(frame, true);
-
- // build and append low key
- ktb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(-3, kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
-
- // build and append high key
- ktb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(3, kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
-
- // create tuplereferences for search keys
- FrameTupleReference lowKey = new FrameTupleReference();
- lowKey.reset(keyAccessor, 0);
-
- FrameTupleReference highKey = new FrameTupleReference();
- highKey.reset(keyAccessor, 1);
-
-
- IBinaryComparator[] searchCmps = new IBinaryComparator[1];
- searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
- MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps); // use only a single comparator for searching
-
- RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
- btree.search(rangeCursor, rangePred, searchOpCtx);
-
- try {
- while (rangeCursor.hasNext()) {
- rangeCursor.next();
- ITupleReference frameTuple = rangeCursor.getTuple();
- String rec = cmp.printTuple(frameTuple, recDescSers);
- print(rec + "\n");
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- rangeCursor.close();
- }
-
- btree.close();
-
- bufferCache.close();
- fileManager.close();
-
- print("\n");
- }
-
- // VARIABLE-LENGTH TEST
- // create a B-tree with one variable-length "key" field and one variable-length "value" field
- // fill B-tree with random values using insertions (not bulk load)
- // perform ordered scan and range search
- @Test
- public void test03() throws Exception {
-
- print("VARIABLE-LENGTH KEY TEST\n");
-
- FileManager fileManager = new FileManager();
- ICacheMemoryAllocator allocator = new BufferAllocator();
- IPageReplacementStrategy prs = new ClockPageReplacementStrategy();
- IBufferCache bufferCache = new BufferCache(allocator, prs, fileManager, PAGE_SIZE, NUM_PAGES);
-
- File f = new File(tmpDir + "/" + "btreetest.bin");
- RandomAccessFile raf = new RandomAccessFile(f, "rw");
- int fileId = 0;
- FileInfo fi = new FileInfo(fileId, raf);
- fileManager.registerFile(fi);
-
- // declare fields
- int fieldCount = 2;
- ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
- typeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
- typeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
-
- // declare keys
- int keyFieldCount = 1;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-
- MultiComparator cmp = new MultiComparator(typeTraits, cmps);
-
- SimpleTupleWriterFactory tupleWriterFactory = new SimpleTupleWriterFactory();
- //TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
- IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
- IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
- IBTreeMetaDataFrameFactory metaFrameFactory = new MetaDataFrameFactory();
-
- IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
- IBTreeInteriorFrame interiorFrame = interiorFrameFactory.getFrame();
- IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
-
- BTree btree = new BTree(bufferCache, interiorFrameFactory, leafFrameFactory, cmp);
- btree.create(fileId, leafFrame, metaFrame);
- btree.open(fileId);
-
- Random rnd = new Random();
- rnd.setSeed(50);
-
- IHyracksContext ctx = new RootHyracksContext(HYRACKS_FRAME_SIZE);
- ByteBuffer frame = ctx.getResourceManager().allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx);
- ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
- DataOutput dos = tb.getDataOutput();
-
- ISerializerDeserializer[] recDescSers = { UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE };
- RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
- IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx, recDesc);
- accessor.reset(frame);
- FrameTupleReference tuple = new FrameTupleReference();
-
- BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT, leafFrame, interiorFrame, metaFrame);
- int maxLength = 10; // max string length to be generated
- for (int i = 0; i < 10000; i++) {
-
- String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
- String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
-
- tb.reset();
- UTF8StringSerializerDeserializer.INSTANCE.serialize(f0, dos);
- tb.addFieldEndOffset();
- UTF8StringSerializerDeserializer.INSTANCE.serialize(f1, dos);
- tb.addFieldEndOffset();
-
- appender.reset(frame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
- tuple.reset(accessor, 0);
-
- if (i % 1000 == 0) {
- //print("INSERTING " + i + ": " + cmp.printRecord(record, 0) + "\n");
- print("INSERTING " + i + "\n");
- }
-
- try {
- btree.insert(tuple, insertOpCtx);
- } catch (Exception e) {
- //e.printStackTrace();
- }
- }
- // btree.printTree();
-
- System.out.println("DONE INSERTING");
-
- // ordered scan
- print("ORDERED SCAN:\n");
- IBTreeCursor scanCursor = new RangeSearchCursor(leafFrame);
- RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
- BTreeOpContext searchOpCtx = btree.createOpContext(BTreeOp.BTO_SEARCH, leafFrame, interiorFrame, null);
- btree.search(scanCursor, nullPred, searchOpCtx);
-
- try {
- while (scanCursor.hasNext()) {
- scanCursor.next();
- ITupleReference frameTuple = scanCursor.getTuple();
- String rec = cmp.printTuple(frameTuple, recDescSers);
- print(rec + "\n");
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- scanCursor.close();
- }
-
- // range search in ["cbf", cc7"]
- print("RANGE SEARCH:\n");
-
- IBTreeCursor rangeCursor = new RangeSearchCursor(leafFrame);
-
- // build low and high keys
- ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
- DataOutput kdos = ktb.getDataOutput();
-
- ISerializerDeserializer[] keyDescSers = { UTF8StringSerializerDeserializer.INSTANCE };
- RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
- IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx, keyDesc);
- keyAccessor.reset(frame);
-
- appender.reset(frame, true);
-
- // build and append low key
- ktb.reset();
- UTF8StringSerializerDeserializer.INSTANCE.serialize("cbf", kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
-
- // build and append high key
- ktb.reset();
- UTF8StringSerializerDeserializer.INSTANCE.serialize("cc7", kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
-
- // create tuplereferences for search keys
- FrameTupleReference lowKey = new FrameTupleReference();
- lowKey.reset(keyAccessor, 0);
-
- FrameTupleReference highKey = new FrameTupleReference();
- highKey.reset(keyAccessor, 1);
-
-
- IBinaryComparator[] searchCmps = new IBinaryComparator[1];
- searchCmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
- MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
-
- RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
- btree.search(rangeCursor, rangePred, searchOpCtx);
-
- try {
- while (rangeCursor.hasNext()) {
- rangeCursor.next();
- ITupleReference frameTuple = rangeCursor.getTuple();
- String rec = cmp.printTuple(frameTuple, recDescSers);
- print(rec + "\n");
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- rangeCursor.close();
- }
-
- btree.close();
-
- bufferCache.close();
- fileManager.close();
-
- print("\n");
- }
-
-
- // DELETION TEST
- // create a B-tree with one variable-length "key" field and one variable-length "value" field
- // fill B-tree with random values using insertions, then delete entries one-by-one
- // repeat procedure a few times on same B-tree
- @Test
- public void test04() throws Exception {
-
- print("DELETION TEST\n");
-
- FileManager fileManager = new FileManager();
- ICacheMemoryAllocator allocator = new BufferAllocator();
- IPageReplacementStrategy prs = new ClockPageReplacementStrategy();
- IBufferCache bufferCache = new BufferCache(allocator, prs, fileManager, PAGE_SIZE, NUM_PAGES);
-
- File f = new File(tmpDir + "/" + "btreetest.bin");
- RandomAccessFile raf = new RandomAccessFile(f, "rw");
- int fileId = 0;
- FileInfo fi = new FileInfo(fileId, raf);
- fileManager.registerFile(fi);
-
- // declare fields
- int fieldCount = 2;
- ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
- typeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
- typeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
-
- // declare keys
- int keyFieldCount = 1;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-
- MultiComparator cmp = new MultiComparator(typeTraits, cmps);
-
- //SimpleTupleWriterFactory tupleWriterFactory = new SimpleTupleWriterFactory();
- TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
- IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
- IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
- IBTreeMetaDataFrameFactory metaFrameFactory = new MetaDataFrameFactory();
-
- IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
- IBTreeInteriorFrame interiorFrame = interiorFrameFactory.getFrame();
- IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
-
- BTree btree = new BTree(bufferCache, interiorFrameFactory, leafFrameFactory, cmp);
- btree.create(fileId, leafFrame, metaFrame);
- btree.open(fileId);
-
- Random rnd = new Random();
- rnd.setSeed(50);
-
- IHyracksContext ctx = new RootHyracksContext(HYRACKS_FRAME_SIZE);
- ByteBuffer frame = ctx.getResourceManager().allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx);
- ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
- DataOutput dos = tb.getDataOutput();
-
- ISerializerDeserializer[] recDescSers = { UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE };
- RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
- IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx, recDesc);
- accessor.reset(frame);
- FrameTupleReference tuple = new FrameTupleReference();
-
- BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT, leafFrame, interiorFrame, metaFrame);
- BTreeOpContext deleteOpCtx = btree.createOpContext(BTreeOp.BTO_DELETE, leafFrame, interiorFrame, metaFrame);
-
- int runs = 3;
- for (int run = 0; run < runs; run++) {
-
- print("DELETION TEST RUN: " + (run+1) + "/" + runs + "\n");
-
- print("INSERTING INTO BTREE\n");
- int maxLength = 10;
- int ins = 10000;
- String[] f0s = new String[ins];
- String[] f1s = new String[ins];
- int insDone = 0;
- int[] insDoneCmp = new int[ins];
- for (int i = 0; i < ins; i++) {
- String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
- String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
-
- f0s[i] = f0;
- f1s[i] = f1;
-
- tb.reset();
- UTF8StringSerializerDeserializer.INSTANCE.serialize(f0, dos);
- tb.addFieldEndOffset();
- UTF8StringSerializerDeserializer.INSTANCE.serialize(f1, dos);
- tb.addFieldEndOffset();
-
- appender.reset(frame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
- tuple.reset(accessor, 0);
-
- if (i % 1000 == 0) {
- print("INSERTING " + i + "\n");
- //print("INSERTING " + i + ": " + cmp.printRecord(record, 0) + "\n");
- }
-
- try {
- btree.insert(tuple, insertOpCtx);
- insDone++;
- } catch (BTreeException e) {
- //e.printStackTrace();
- } catch (Exception e) {
- e.printStackTrace();
- }
-
- insDoneCmp[i] = insDone;
- }
- // btree.printTree();
- // btree.printStats();
-
- print("DELETING FROM BTREE\n");
- int delDone = 0;
- for (int i = 0; i < ins; i++) {
-
- tb.reset();
- UTF8StringSerializerDeserializer.INSTANCE.serialize(f0s[i], dos);
- tb.addFieldEndOffset();
- UTF8StringSerializerDeserializer.INSTANCE.serialize(f1s[i], dos);
- tb.addFieldEndOffset();
-
- appender.reset(frame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
- tuple.reset(accessor, 0);
-
- if (i % 1000 == 0) {
- //print("DELETING " + i + ": " + cmp.printRecord(records[i], 0) + "\n");
- print("DELETING " + i + "\n");
- }
-
- try {
- btree.delete(tuple, deleteOpCtx);
- delDone++;
- } catch (BTreeException e) {
- //e.printStackTrace();
- } catch (Exception e) {
- e.printStackTrace();
- }
-
- if (insDoneCmp[i] != delDone) {
- print("INCONSISTENT STATE, ERROR IN DELETION TEST\n");
- print("INSDONECMP: " + insDoneCmp[i] + " " + delDone + "\n");
- break;
- }
- // btree.printTree();
- }
- //btree.printTree(leafFrame, interiorFrame);
-
- if (insDone != delDone) {
- print("ERROR! INSDONE: " + insDone + " DELDONE: " + delDone);
- break;
- }
- }
-
- btree.close();
-
- bufferCache.close();
- fileManager.close();
-
- print("\n");
- }
-
- // BULK LOAD TEST
- // insert 100,000 records in bulk
- // B-tree has a composite key to "simulate" non-unique index creation
- // do range search
- @Test
- public void test05() throws Exception {
-
- print("BULK LOAD TEST\n");
-
- FileManager fileManager = new FileManager();
- ICacheMemoryAllocator allocator = new BufferAllocator();
- IPageReplacementStrategy prs = new ClockPageReplacementStrategy();
- IBufferCache bufferCache = new BufferCache(allocator, prs, fileManager, PAGE_SIZE, NUM_PAGES);
-
- File f = new File(tmpDir + "/" + "btreetest.bin");
- RandomAccessFile raf = new RandomAccessFile(f, "rw");
- int fileId = 0;
- FileInfo fi = new FileInfo(fileId, raf);
- fileManager.registerFile(fi);
-
- // declare fields
- int fieldCount = 3;
- ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
- typeTraits[0] = new TypeTrait(4);
- typeTraits[1] = new TypeTrait(4);
- typeTraits[2] = new TypeTrait(4);
-
- // declare keys
- int keyFieldCount = 2;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
- cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-
- MultiComparator cmp = new MultiComparator(typeTraits, cmps);
-
- //SimpleTupleWriterFactory tupleWriterFactory = new SimpleTupleWriterFactory();
- TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
- IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
- IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
- IBTreeMetaDataFrameFactory metaFrameFactory = new MetaDataFrameFactory();
-
- IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
- IBTreeInteriorFrame interiorFrame = interiorFrameFactory.getFrame();
- IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
-
- BTree btree = new BTree(bufferCache, interiorFrameFactory, leafFrameFactory, cmp);
- btree.create(fileId, leafFrame, metaFrame);
- btree.open(fileId);
-
- Random rnd = new Random();
- rnd.setSeed(50);
-
- IHyracksContext ctx = new RootHyracksContext(HYRACKS_FRAME_SIZE);
- ByteBuffer frame = ctx.getResourceManager().allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx);
- ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
- DataOutput dos = tb.getDataOutput();
-
- ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
- RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
- IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx, recDesc);
- accessor.reset(frame);
- FrameTupleReference tuple = new FrameTupleReference();
-
- BTree.BulkLoadContext bulkLoadCtx = btree.beginBulkLoad(0.7f, leafFrame, interiorFrame, metaFrame);
-
- // generate sorted records
- int ins = 100000;
- print("BULK LOADING " + ins + " RECORDS\n");
- long start = System.currentTimeMillis();
- for (int i = 0; i < ins; i++) {
-
- tb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(5, dos);
- tb.addFieldEndOffset();
-
- appender.reset(frame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
- tuple.reset(accessor, 0);
-
- btree.bulkLoadAddTuple(bulkLoadCtx, tuple);
- }
-
- btree.endBulkLoad(bulkLoadCtx);
-
- //btree.printTree(leafFrame, interiorFrame);
-
- long end = System.currentTimeMillis();
- long duration = end - start;
- print("DURATION: " + duration + "\n");
-
- // range search
print("RANGE SEARCH:\n");
IBTreeCursor rangeCursor = new RangeSearchCursor(leafFrame);
-
- // build low and high keys
- ArrayTupleBuilder ktb = new ArrayTupleBuilder(1);
- DataOutput kdos = ktb.getDataOutput();
-
- ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE };
- RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
- IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx, keyDesc);
- keyAccessor.reset(frame);
-
- appender.reset(frame, true);
-
- // build and append low key
- ktb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(44444, kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
-
- // build and append high key
- ktb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(44500, kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
-
- // create tuplereferences for search keys
- FrameTupleReference lowKey = new FrameTupleReference();
- lowKey.reset(keyAccessor, 0);
-
- FrameTupleReference highKey = new FrameTupleReference();
- highKey.reset(keyAccessor, 1);
-
-
+
+ // build low and high keys
+ ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
+ DataOutput kdos = ktb.getDataOutput();
+
+ ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
+ IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx, keyDesc);
+ keyAccessor.reset(frame);
+
+ appender.reset(frame, true);
+
+ // build and append low key
+ ktb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(-3, kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
+
+ // build and append high key
+ ktb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(3, kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
+
+ // create tuplereferences for search keys
+ FrameTupleReference lowKey = new FrameTupleReference();
+ lowKey.reset(keyAccessor, 0);
+
+ FrameTupleReference highKey = new FrameTupleReference();
+ highKey.reset(keyAccessor, 1);
+
IBinaryComparator[] searchCmps = new IBinaryComparator[1];
searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
- MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
-
- // TODO: check when searching backwards
+ MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps); // use
+ // only
+ // a
+ // single
+ // comparator
+ // for
+ // searching
+
RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
- BTreeOpContext searchOpCtx = btree.createOpContext(BTreeOp.BTO_SEARCH, leafFrame, interiorFrame, null);
btree.search(rangeCursor, rangePred, searchOpCtx);
-
+
try {
while (rangeCursor.hasNext()) {
- rangeCursor.next();
- ITupleReference frameTuple = rangeCursor.getTuple();
+ rangeCursor.next();
+ ITupleReference frameTuple = rangeCursor.getTuple();
String rec = cmp.printTuple(frameTuple, recDescSers);
print(rec + "\n");
}
@@ -1006,54 +491,53 @@
}
btree.close();
-
+ bufferCache.closeFile(fileId);
bufferCache.close();
- fileManager.close();
-
+
print("\n");
- }
-
- // TIME-INTERVAL INTERSECTION DEMO FOR EVENT PEOPLE
- // demo for Arjun to show easy support of intersection queries on time-intervals
+ }
+
+ // VARIABLE-LENGTH TEST
+ // create a B-tree with one variable-length "key" field and one
+ // variable-length "value" field
+ // fill B-tree with random values using insertions (not bulk load)
+ // perform ordered scan and range search
@Test
- public void test06() throws Exception {
+ public void test03() throws Exception {
- print("TIME-INTERVAL INTERSECTION DEMO\n");
-
- FileManager fileManager = new FileManager();
- ICacheMemoryAllocator allocator = new BufferAllocator();
- IPageReplacementStrategy prs = new ClockPageReplacementStrategy();
- IBufferCache bufferCache = new BufferCache(allocator, prs, fileManager, PAGE_SIZE, NUM_PAGES);
+ print("VARIABLE-LENGTH KEY TEST\n");
- File f = new File(tmpDir + "/" + "btreetest.bin");
- RandomAccessFile raf = new RandomAccessFile(f, "rw");
- int fileId = 0;
- FileInfo fi = new FileInfo(fileId, raf);
- fileManager.registerFile(fi);
-
+ DummySMI smi = new DummySMI(PAGE_SIZE, NUM_PAGES);
+ IBufferCache bufferCache = smi.getBufferCache();
+ IFileMapProvider fmp = smi.getFileMapProvider();
+ String fileName = tmpDir + "/" + "btreetest.bin";
+ bufferCache.createFile(fileName);
+ int fileId = fmp.lookupFileId(fileName);
+ bufferCache.openFile(fileId);
+
// declare fields
- int fieldCount = 3;
+ int fieldCount = 2;
ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
- typeTraits[0] = new TypeTrait(4);
- typeTraits[1] = new TypeTrait(4);
- typeTraits[2] = new TypeTrait(4);
-
+ typeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+ typeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+
// declare keys
- int keyFieldCount = 2;
+ int keyFieldCount = 1;
IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
- cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ cmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+
MultiComparator cmp = new MultiComparator(typeTraits, cmps);
-
- //SimpleTupleWriterFactory tupleWriterFactory = new SimpleTupleWriterFactory();
- TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+
+ SimpleTupleWriterFactory tupleWriterFactory = new SimpleTupleWriterFactory();
+ // TypeAwareTupleWriterFactory tupleWriterFactory = new
+ // TypeAwareTupleWriterFactory(typeTraits);
IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
IBTreeMetaDataFrameFactory metaFrameFactory = new MetaDataFrameFactory();
-
+
IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
IBTreeInteriorFrame interiorFrame = interiorFrameFactory.getFrame();
- IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
+ IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
BTree btree = new BTree(bufferCache, interiorFrameFactory, leafFrameFactory, cmp);
btree.create(fileId, leafFrame, metaFrame);
@@ -1061,21 +545,525 @@
Random rnd = new Random();
rnd.setSeed(50);
-
- IHyracksContext ctx = new RootHyracksContext(HYRACKS_FRAME_SIZE);
+
+ IHyracksContext ctx = new RootHyracksContext(HYRACKS_FRAME_SIZE);
ByteBuffer frame = ctx.getResourceManager().allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx);
- ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
- DataOutput dos = tb.getDataOutput();
-
- ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
- RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
- IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx, recDesc);
- accessor.reset(frame);
- FrameTupleReference tuple = new FrameTupleReference();
-
+ FrameTupleAppender appender = new FrameTupleAppender(ctx);
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+ DataOutput dos = tb.getDataOutput();
+
+ ISerializerDeserializer[] recDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx, recDesc);
+ accessor.reset(frame);
+ FrameTupleReference tuple = new FrameTupleReference();
+
+ BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT, leafFrame, interiorFrame, metaFrame);
+ int maxLength = 10; // max string length to be generated
+ for (int i = 0; i < 10000; i++) {
+
+ String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+ String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+
+ tb.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ if (i % 1000 == 0) {
+ // print("INSERTING " + i + ": " + cmp.printRecord(record, 0) +
+ // "\n");
+ print("INSERTING " + i + "\n");
+ }
+
+ try {
+ btree.insert(tuple, insertOpCtx);
+ } catch (Exception e) {
+ // e.printStackTrace();
+ }
+ }
+ // btree.printTree();
+
+ System.out.println("DONE INSERTING");
+
+ // ordered scan
+ print("ORDERED SCAN:\n");
+ IBTreeCursor scanCursor = new RangeSearchCursor(leafFrame);
+ RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+ BTreeOpContext searchOpCtx = btree.createOpContext(BTreeOp.BTO_SEARCH, leafFrame, interiorFrame, null);
+ btree.search(scanCursor, nullPred, searchOpCtx);
+
+ try {
+ while (scanCursor.hasNext()) {
+ scanCursor.next();
+ ITupleReference frameTuple = scanCursor.getTuple();
+ String rec = cmp.printTuple(frameTuple, recDescSers);
+ print(rec + "\n");
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ scanCursor.close();
+ }
+
+ // range search in ["cbf", cc7"]
+ print("RANGE SEARCH:\n");
+
+ IBTreeCursor rangeCursor = new RangeSearchCursor(leafFrame);
+
+ // build low and high keys
+ ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
+ DataOutput kdos = ktb.getDataOutput();
+
+ ISerializerDeserializer[] keyDescSers = { UTF8StringSerializerDeserializer.INSTANCE };
+ RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
+ IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx, keyDesc);
+ keyAccessor.reset(frame);
+
+ appender.reset(frame, true);
+
+ // build and append low key
+ ktb.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize("cbf", kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
+
+ // build and append high key
+ ktb.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize("cc7", kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
+
+ // create tuplereferences for search keys
+ FrameTupleReference lowKey = new FrameTupleReference();
+ lowKey.reset(keyAccessor, 0);
+
+ FrameTupleReference highKey = new FrameTupleReference();
+ highKey.reset(keyAccessor, 1);
+
+ IBinaryComparator[] searchCmps = new IBinaryComparator[1];
+ searchCmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
+
+ RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
+ btree.search(rangeCursor, rangePred, searchOpCtx);
+
+ try {
+ while (rangeCursor.hasNext()) {
+ rangeCursor.next();
+ ITupleReference frameTuple = rangeCursor.getTuple();
+ String rec = cmp.printTuple(frameTuple, recDescSers);
+ print(rec + "\n");
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ rangeCursor.close();
+ }
+
+ btree.close();
+ bufferCache.closeFile(fileId);
+ bufferCache.close();
+
+ print("\n");
+ }
+
+ // DELETION TEST
+ // create a B-tree with one variable-length "key" field and one
+ // variable-length "value" field
+ // fill B-tree with random values using insertions, then delete entries
+ // one-by-one
+ // repeat procedure a few times on same B-tree
+ @Test
+ public void test04() throws Exception {
+
+ print("DELETION TEST\n");
+
+ DummySMI smi = new DummySMI(PAGE_SIZE, NUM_PAGES);
+ IBufferCache bufferCache = smi.getBufferCache();
+ IFileMapProvider fmp = smi.getFileMapProvider();
+ String fileName = tmpDir + "/" + "btreetest.bin";
+ bufferCache.createFile(fileName);
+ int fileId = fmp.lookupFileId(fileName);
+ bufferCache.openFile(fileId);
+
+ // declare fields
+ int fieldCount = 2;
+ ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ typeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+ typeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+
+ // declare keys
+ int keyFieldCount = 1;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+
+ MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+
+ // SimpleTupleWriterFactory tupleWriterFactory = new
+ // SimpleTupleWriterFactory();
+ TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+ IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
+ IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
+ IBTreeMetaDataFrameFactory metaFrameFactory = new MetaDataFrameFactory();
+
+ IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
+ IBTreeInteriorFrame interiorFrame = interiorFrameFactory.getFrame();
+ IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
+
+ BTree btree = new BTree(bufferCache, interiorFrameFactory, leafFrameFactory, cmp);
+ btree.create(fileId, leafFrame, metaFrame);
+ btree.open(fileId);
+
+ Random rnd = new Random();
+ rnd.setSeed(50);
+
+ IHyracksContext ctx = new RootHyracksContext(HYRACKS_FRAME_SIZE);
+ ByteBuffer frame = ctx.getResourceManager().allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx);
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+ DataOutput dos = tb.getDataOutput();
+
+ ISerializerDeserializer[] recDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx, recDesc);
+ accessor.reset(frame);
+ FrameTupleReference tuple = new FrameTupleReference();
+
+ BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT, leafFrame, interiorFrame, metaFrame);
+ BTreeOpContext deleteOpCtx = btree.createOpContext(BTreeOp.BTO_DELETE, leafFrame, interiorFrame, metaFrame);
+
+ int runs = 3;
+ for (int run = 0; run < runs; run++) {
+
+ print("DELETION TEST RUN: " + (run + 1) + "/" + runs + "\n");
+
+ print("INSERTING INTO BTREE\n");
+ int maxLength = 10;
+ int ins = 10000;
+ String[] f0s = new String[ins];
+ String[] f1s = new String[ins];
+ int insDone = 0;
+ int[] insDoneCmp = new int[ins];
+ for (int i = 0; i < ins; i++) {
+ String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+ String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+
+ f0s[i] = f0;
+ f1s[i] = f1;
+
+ tb.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ if (i % 1000 == 0) {
+ print("INSERTING " + i + "\n");
+ // print("INSERTING " + i + ": " + cmp.printRecord(record,
+ // 0) + "\n");
+ }
+
+ try {
+ btree.insert(tuple, insertOpCtx);
+ insDone++;
+ } catch (BTreeException e) {
+ // e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ insDoneCmp[i] = insDone;
+ }
+ // btree.printTree();
+ // btree.printStats();
+
+ print("DELETING FROM BTREE\n");
+ int delDone = 0;
+ for (int i = 0; i < ins; i++) {
+
+ tb.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(f0s[i], dos);
+ tb.addFieldEndOffset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(f1s[i], dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ if (i % 1000 == 0) {
+ // print("DELETING " + i + ": " +
+ // cmp.printRecord(records[i], 0) + "\n");
+ print("DELETING " + i + "\n");
+ }
+
+ try {
+ btree.delete(tuple, deleteOpCtx);
+ delDone++;
+ } catch (BTreeException e) {
+ // e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ if (insDoneCmp[i] != delDone) {
+ print("INCONSISTENT STATE, ERROR IN DELETION TEST\n");
+ print("INSDONECMP: " + insDoneCmp[i] + " " + delDone + "\n");
+ break;
+ }
+ // btree.printTree();
+ }
+ // btree.printTree(leafFrame, interiorFrame);
+
+ if (insDone != delDone) {
+ print("ERROR! INSDONE: " + insDone + " DELDONE: " + delDone);
+ break;
+ }
+ }
+
+ btree.close();
+ bufferCache.closeFile(fileId);
+ bufferCache.close();
+
+ print("\n");
+ }
+
+ // BULK LOAD TEST
+ // insert 100,000 records in bulk
+ // B-tree has a composite key to "simulate" non-unique index creation
+ // do range search
+ @Test
+ public void test05() throws Exception {
+
+ print("BULK LOAD TEST\n");
+
+ DummySMI smi = new DummySMI(PAGE_SIZE, NUM_PAGES);
+ IBufferCache bufferCache = smi.getBufferCache();
+ IFileMapProvider fmp = smi.getFileMapProvider();
+ String fileName = tmpDir + "/" + "btreetest.bin";
+ bufferCache.createFile(fileName);
+ int fileId = fmp.lookupFileId(fileName);
+ bufferCache.openFile(fileId);
+
+ // declare fields
+ int fieldCount = 3;
+ ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ typeTraits[0] = new TypeTrait(4);
+ typeTraits[1] = new TypeTrait(4);
+ typeTraits[2] = new TypeTrait(4);
+
+ // declare keys
+ int keyFieldCount = 2;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+
+ MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+
+ // SimpleTupleWriterFactory tupleWriterFactory = new
+ // SimpleTupleWriterFactory();
+ TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+ IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
+ IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
+ IBTreeMetaDataFrameFactory metaFrameFactory = new MetaDataFrameFactory();
+
+ IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
+ IBTreeInteriorFrame interiorFrame = interiorFrameFactory.getFrame();
+ IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
+
+ BTree btree = new BTree(bufferCache, interiorFrameFactory, leafFrameFactory, cmp);
+ btree.create(fileId, leafFrame, metaFrame);
+ btree.open(fileId);
+
+ Random rnd = new Random();
+ rnd.setSeed(50);
+
+ IHyracksContext ctx = new RootHyracksContext(HYRACKS_FRAME_SIZE);
+ ByteBuffer frame = ctx.getResourceManager().allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx);
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+ DataOutput dos = tb.getDataOutput();
+
+ ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx, recDesc);
+ accessor.reset(frame);
+ FrameTupleReference tuple = new FrameTupleReference();
+
+ BTree.BulkLoadContext bulkLoadCtx = btree.beginBulkLoad(0.7f, leafFrame, interiorFrame, metaFrame);
+
+ // generate sorted records
+ int ins = 100000;
+ print("BULK LOADING " + ins + " RECORDS\n");
long start = System.currentTimeMillis();
-
+ for (int i = 0; i < ins; i++) {
+
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(5, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ btree.bulkLoadAddTuple(bulkLoadCtx, tuple);
+ }
+
+ btree.endBulkLoad(bulkLoadCtx);
+
+ // btree.printTree(leafFrame, interiorFrame);
+
+ long end = System.currentTimeMillis();
+ long duration = end - start;
+ print("DURATION: " + duration + "\n");
+
+ // range search
+ print("RANGE SEARCH:\n");
+ IBTreeCursor rangeCursor = new RangeSearchCursor(leafFrame);
+
+ // build low and high keys
+ ArrayTupleBuilder ktb = new ArrayTupleBuilder(1);
+ DataOutput kdos = ktb.getDataOutput();
+
+ ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
+ IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx, keyDesc);
+ keyAccessor.reset(frame);
+
+ appender.reset(frame, true);
+
+ // build and append low key
+ ktb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(44444, kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
+
+ // build and append high key
+ ktb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(44500, kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
+
+ // create tuplereferences for search keys
+ FrameTupleReference lowKey = new FrameTupleReference();
+ lowKey.reset(keyAccessor, 0);
+
+ FrameTupleReference highKey = new FrameTupleReference();
+ highKey.reset(keyAccessor, 1);
+
+ IBinaryComparator[] searchCmps = new IBinaryComparator[1];
+ searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
+
+ // TODO: check when searching backwards
+ RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
+ BTreeOpContext searchOpCtx = btree.createOpContext(BTreeOp.BTO_SEARCH, leafFrame, interiorFrame, null);
+ btree.search(rangeCursor, rangePred, searchOpCtx);
+
+ try {
+ while (rangeCursor.hasNext()) {
+ rangeCursor.next();
+ ITupleReference frameTuple = rangeCursor.getTuple();
+ String rec = cmp.printTuple(frameTuple, recDescSers);
+ print(rec + "\n");
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ rangeCursor.close();
+ }
+
+ btree.close();
+ bufferCache.closeFile(fileId);
+ bufferCache.close();
+
+ print("\n");
+ }
+
+ // TIME-INTERVAL INTERSECTION DEMO FOR EVENT PEOPLE
+ // demo for Arjun to show easy support of intersection queries on
+ // time-intervals
+ @Test
+ public void test06() throws Exception {
+
+ print("TIME-INTERVAL INTERSECTION DEMO\n");
+
+ DummySMI smi = new DummySMI(PAGE_SIZE, NUM_PAGES);
+ IBufferCache bufferCache = smi.getBufferCache();
+ IFileMapProvider fmp = smi.getFileMapProvider();
+ String fileName = tmpDir + "/" + "btreetest.bin";
+ bufferCache.createFile(fileName);
+ int fileId = fmp.lookupFileId(fileName);
+ bufferCache.openFile(fileId);
+
+ // declare fields
+ int fieldCount = 3;
+ ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ typeTraits[0] = new TypeTrait(4);
+ typeTraits[1] = new TypeTrait(4);
+ typeTraits[2] = new TypeTrait(4);
+
+ // declare keys
+ int keyFieldCount = 2;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+
+ // SimpleTupleWriterFactory tupleWriterFactory = new
+ // SimpleTupleWriterFactory();
+ TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+ IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
+ IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
+ IBTreeMetaDataFrameFactory metaFrameFactory = new MetaDataFrameFactory();
+
+ IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
+ IBTreeInteriorFrame interiorFrame = interiorFrameFactory.getFrame();
+ IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
+
+ BTree btree = new BTree(bufferCache, interiorFrameFactory, leafFrameFactory, cmp);
+ btree.create(fileId, leafFrame, metaFrame);
+ btree.open(fileId);
+
+ Random rnd = new Random();
+ rnd.setSeed(50);
+
+ IHyracksContext ctx = new RootHyracksContext(HYRACKS_FRAME_SIZE);
+ ByteBuffer frame = ctx.getResourceManager().allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx);
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+ DataOutput dos = tb.getDataOutput();
+
+ ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx, recDesc);
+ accessor.reset(frame);
+ FrameTupleReference tuple = new FrameTupleReference();
+
+ long start = System.currentTimeMillis();
+
int intervalCount = 10;
int[][] intervals = new int[intervalCount][2];
@@ -1110,28 +1098,28 @@
intervals[9][1] = 35;
BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT, leafFrame, interiorFrame, metaFrame);
-
+
// int exceptionCount = 0;
- for (int i = 0; i < intervalCount; i++) {
- int f0 = intervals[i][0];
- int f1 = intervals[i][1];
- int f2 = rnd.nextInt() % 100;
-
- tb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(f2, dos);
- tb.addFieldEndOffset();
-
- appender.reset(frame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
- tuple.reset(accessor, 0);
-
- //print("INSERTING " + i + " : " + f0 + " " + f1 + "\n");
- print("INSERTING " + i + "\n");
+ for (int i = 0; i < intervalCount; i++) {
+ int f0 = intervals[i][0];
+ int f1 = intervals[i][1];
+ int f2 = rnd.nextInt() % 100;
+
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f2, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ // print("INSERTING " + i + " : " + f0 + " " + f1 + "\n");
+ print("INSERTING " + i + "\n");
try {
btree.insert(tuple, insertOpCtx);
@@ -1157,7 +1145,7 @@
try {
while (scanCursor.hasNext()) {
scanCursor.next();
- ITupleReference frameTuple = scanCursor.getTuple();
+ ITupleReference frameTuple = scanCursor.getTuple();
String rec = cmp.printTuple(frameTuple, recDescSers);
print(rec + "\n");
}
@@ -1171,55 +1159,56 @@
print("RANGE SEARCH:\n");
IBTreeCursor rangeCursor = new RangeSearchCursor(leafFrame);
- // build low and high keys
+ // build low and high keys
ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
- DataOutput kdos = ktb.getDataOutput();
-
- ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
- RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
- IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx, keyDesc);
- keyAccessor.reset(frame);
-
- appender.reset(frame, true);
-
- // build and append low key
- ktb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(12, kdos);
- ktb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(12, kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
-
- // build and append high key
- ktb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(19, kdos);
- ktb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(19, kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
-
- // create tuplereferences for search keys
- FrameTupleReference lowKey = new FrameTupleReference();
- lowKey.reset(keyAccessor, 0);
-
- FrameTupleReference highKey = new FrameTupleReference();
- highKey.reset(keyAccessor, 1);
-
-
+ DataOutput kdos = ktb.getDataOutput();
+
+ ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
+ IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx, keyDesc);
+ keyAccessor.reset(frame);
+
+ appender.reset(frame, true);
+
+ // build and append low key
+ ktb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(12, kdos);
+ ktb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(12, kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
+
+ // build and append high key
+ ktb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(19, kdos);
+ ktb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(19, kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
+
+ // create tuplereferences for search keys
+ FrameTupleReference lowKey = new FrameTupleReference();
+ lowKey.reset(keyAccessor, 0);
+
+ FrameTupleReference highKey = new FrameTupleReference();
+ highKey.reset(keyAccessor, 1);
+
IBinaryComparator[] searchCmps = new IBinaryComparator[2];
searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
searchCmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
-
- //print("INDEX RANGE SEARCH ON: " + cmp.printKey(lowKey, 0) + " " + cmp.printKey(highKey, 0) + "\n");
-
+
+ // print("INDEX RANGE SEARCH ON: " + cmp.printKey(lowKey, 0) + " " +
+ // cmp.printKey(highKey, 0) + "\n");
+
RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
btree.search(rangeCursor, rangePred, searchOpCtx);
-
+
try {
while (rangeCursor.hasNext()) {
rangeCursor.next();
- ITupleReference frameTuple = rangeCursor.getTuple();
+ ITupleReference frameTuple = rangeCursor.getTuple();
String rec = cmp.printTuple(frameTuple, recDescSers);
print(rec + "\n");
}
@@ -1230,13 +1219,12 @@
}
btree.close();
-
+ bufferCache.closeFile(fileId);
bufferCache.close();
- fileManager.close();
-
+
print("\n");
}
-
+
public static String randomString(int length, Random random) {
String s = Long.toHexString(Double.doubleToLongBits(random.nextDouble()));
StringBuilder strBuilder = new StringBuilder();
@@ -1244,5 +1232,5 @@
strBuilder.append(s.charAt(Math.abs(random.nextInt()) % s.length()));
}
return strBuilder.toString();
- }
+ }
}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-am-btree/src/test/java/edu/uci/ics/hyracks/storage/am/btree/RangeSearchCursorTest.java b/hyracks/hyracks-storage-am-btree/src/test/java/edu/uci/ics/hyracks/storage/am/btree/RangeSearchCursorTest.java
index 9bfaa6b..7ec564a 100644
--- a/hyracks/hyracks-storage-am-btree/src/test/java/edu/uci/ics/hyracks/storage/am/btree/RangeSearchCursorTest.java
+++ b/hyracks/hyracks-storage-am-btree/src/test/java/edu/uci/ics/hyracks/storage/am/btree/RangeSearchCursorTest.java
@@ -1,11 +1,24 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.btree;
import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
-import java.io.File;
-import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
@@ -32,6 +45,7 @@
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.api.DummySMI;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeCursor;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
@@ -51,25 +65,17 @@
import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
import edu.uci.ics.hyracks.storage.am.btree.impls.RangeSearchCursor;
import edu.uci.ics.hyracks.storage.am.btree.tuples.TypeAwareTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.common.buffercache.BufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ClockPageReplacementStrategy;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
-import edu.uci.ics.hyracks.storage.common.buffercache.IPageReplacementStrategy;
-import edu.uci.ics.hyracks.storage.common.file.FileInfo;
-import edu.uci.ics.hyracks.storage.common.file.FileManager;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
public class RangeSearchCursorTest {
- private static final int PAGE_SIZE = 256;
+ private static final int PAGE_SIZE = 256;
private static final int NUM_PAGES = 10;
private static final int HYRACKS_FRAME_SIZE = 128;
-
+
private String tmpDir = System.getProperty("java.io.tmpdir");
-
- private void print(String str) {
- System.out.print(str);
- }
-
+
public class BufferAllocator implements ICacheMemoryAllocator {
@Override
public ByteBuffer[] allocate(int pageSize, int numPages) {
@@ -80,464 +86,464 @@
return buffers;
}
}
-
+
// declare fields
int fieldCount = 2;
ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
-
- TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+
+ TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
IBTreeMetaDataFrameFactory metaFrameFactory = new MetaDataFrameFactory();
-
+
IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
IBTreeInteriorFrame interiorFrame = interiorFrameFactory.getFrame();
- IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
-
- IHyracksContext ctx = new RootHyracksContext(HYRACKS_FRAME_SIZE);
+ IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
+
+ IHyracksContext ctx = new RootHyracksContext(HYRACKS_FRAME_SIZE);
ByteBuffer frame = ctx.getResourceManager().allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx);
-
- ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE};
- RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
- IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx, recDesc);
- FrameTupleReference tuple = new FrameTupleReference();
-
- Random rnd = new Random(50);
-
+ FrameTupleAppender appender = new FrameTupleAppender(ctx);
+
+ ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx, recDesc);
+ FrameTupleReference tuple = new FrameTupleReference();
+
+ Random rnd = new Random(50);
+
@Before
public void setUp() {
- typeTraits[0] = new TypeTrait(4);
+ typeTraits[0] = new TypeTrait(4);
typeTraits[1] = new TypeTrait(4);
accessor.reset(frame);
- }
-
+ }
+
@Test
public void uniqueIndexTest() throws Exception {
-
- System.out.println("TESTING RANGE SEARCH CURSOR ON UNIQUE INDEX");
-
- FileManager fileManager = new FileManager();
- ICacheMemoryAllocator allocator = new BufferAllocator();
- IPageReplacementStrategy prs = new ClockPageReplacementStrategy();
- IBufferCache bufferCache = new BufferCache(allocator, prs, fileManager, PAGE_SIZE, NUM_PAGES);
-
- File f = new File(tmpDir + "/" + "btreetest.bin");
- RandomAccessFile raf = new RandomAccessFile(f, "rw");
- int fileId = 0;
- FileInfo fi = new FileInfo(fileId, raf);
- fileManager.registerFile(fi);
-
+
+ System.out.println("TESTING RANGE SEARCH CURSOR ON UNIQUE INDEX");
+
+ DummySMI smi = new DummySMI(PAGE_SIZE, NUM_PAGES);
+ IBufferCache bufferCache = smi.getBufferCache();
+ IFileMapProvider fmp = smi.getFileMapProvider();
+ String fileName = tmpDir + "/" + "btreetest.bin";
+ bufferCache.createFile(fileName);
+ int fileId = fmp.lookupFileId(fileName);
+ bufferCache.openFile(fileId);
+
// declare keys
int keyFieldCount = 1;
IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-
+ cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+
MultiComparator cmp = new MultiComparator(typeTraits, cmps);
-
+
BTree btree = new BTree(bufferCache, interiorFrameFactory, leafFrameFactory, cmp);
btree.create(fileId, leafFrame, metaFrame);
btree.open(fileId);
-
+
ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
- DataOutput dos = tb.getDataOutput();
-
- BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT, leafFrame, interiorFrame, metaFrame);
-
- // generate keys
- int numKeys = 50;
- int maxKey = 1000;
- TreeSet<Integer> uniqueKeys = new TreeSet<Integer>();
- ArrayList<Integer> keys = new ArrayList<Integer>();
- while(uniqueKeys.size() < numKeys) {
- int key = rnd.nextInt() % maxKey;
- uniqueKeys.add(key);
- }
- for(Integer i : uniqueKeys) {
- keys.add(i);
- }
-
- // insert keys into btree
- for (int i = 0; i < keys.size(); i++) {
-
- tb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(keys.get(i).intValue(), dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
- tb.addFieldEndOffset();
-
- appender.reset(frame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
- tuple.reset(accessor, 0);
-
- try {
+ DataOutput dos = tb.getDataOutput();
+
+ BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT, leafFrame, interiorFrame, metaFrame);
+
+ // generate keys
+ int numKeys = 50;
+ int maxKey = 1000;
+ TreeSet<Integer> uniqueKeys = new TreeSet<Integer>();
+ ArrayList<Integer> keys = new ArrayList<Integer>();
+ while (uniqueKeys.size() < numKeys) {
+ int key = rnd.nextInt() % maxKey;
+ uniqueKeys.add(key);
+ }
+ for (Integer i : uniqueKeys) {
+ keys.add(i);
+ }
+
+ // insert keys into btree
+ for (int i = 0; i < keys.size(); i++) {
+
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(keys.get(i).intValue(), dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ try {
btree.insert(tuple, insertOpCtx);
- } catch (BTreeException e) {
+ } catch (BTreeException e) {
} catch (Exception e) {
- e.printStackTrace();
- }
- }
-
- //btree.printTree(leafFrame, interiorFrame, recDescSers);
-
+ e.printStackTrace();
+ }
+ }
+
+ // btree.printTree(leafFrame, interiorFrame, recDescSers);
+
int minSearchKey = -100;
int maxSearchKey = 100;
-
- //System.out.println("STARTING SEARCH TESTS");
-
+
+ // System.out.println("STARTING SEARCH TESTS");
+
// forward searches
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, true, false);
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, false, true, false);
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false, false);
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, true, false);
-
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, true, false);
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, false, true, false);
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false, false);
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, true, false);
+
// backward searches
performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, true, false);
performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, false, true, false);
performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, false, false);
performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, true, false);
-
- btree.close();
+ btree.close();
+ bufferCache.closeFile(fileId);
bufferCache.close();
- fileManager.close();
- }
-
+ }
+
@Test
public void nonUniqueIndexTest() throws Exception {
-
- System.out.println("TESTING RANGE SEARCH CURSOR ON NONUNIQUE INDEX");
-
- FileManager fileManager = new FileManager();
- ICacheMemoryAllocator allocator = new BufferAllocator();
- IPageReplacementStrategy prs = new ClockPageReplacementStrategy();
- IBufferCache bufferCache = new BufferCache(allocator, prs, fileManager, PAGE_SIZE, NUM_PAGES);
-
- File f = new File(tmpDir + "/" + "btreetest.bin");
- RandomAccessFile raf = new RandomAccessFile(f, "rw");
- int fileId = 0;
- FileInfo fi = new FileInfo(fileId, raf);
- fileManager.registerFile(fi);
-
+
+ System.out.println("TESTING RANGE SEARCH CURSOR ON NONUNIQUE INDEX");
+
+ DummySMI smi = new DummySMI(PAGE_SIZE, NUM_PAGES);
+ IBufferCache bufferCache = smi.getBufferCache();
+ IFileMapProvider fmp = smi.getFileMapProvider();
+ String fileName = tmpDir + "/" + "btreetest.bin";
+ bufferCache.createFile(fileName);
+ int fileId = fmp.lookupFileId(fileName);
+ bufferCache.openFile(fileId);
+
// declare keys
int keyFieldCount = 2;
IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-
+
MultiComparator cmp = new MultiComparator(typeTraits, cmps);
-
+
BTree btree = new BTree(bufferCache, interiorFrameFactory, leafFrameFactory, cmp);
btree.create(fileId, leafFrame, metaFrame);
btree.open(fileId);
-
+
ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
- DataOutput dos = tb.getDataOutput();
-
- BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT, leafFrame, interiorFrame, metaFrame);
-
- // generate keys
- int numKeys = 50;
- int maxKey = 10;
- ArrayList<Integer> keys = new ArrayList<Integer>();
- for(int i = 0; i < numKeys; i++) {
- int k = rnd.nextInt() % maxKey;
- keys.add(k);
- }
- Collections.sort(keys);
-
- // insert keys into btree
- for (int i = 0; i < keys.size(); i++) {
-
- tb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(keys.get(i).intValue(), dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
- tb.addFieldEndOffset();
-
- appender.reset(frame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
- tuple.reset(accessor, 0);
-
- try {
+ DataOutput dos = tb.getDataOutput();
+
+ BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT, leafFrame, interiorFrame, metaFrame);
+
+ // generate keys
+ int numKeys = 50;
+ int maxKey = 10;
+ ArrayList<Integer> keys = new ArrayList<Integer>();
+ for (int i = 0; i < numKeys; i++) {
+ int k = rnd.nextInt() % maxKey;
+ keys.add(k);
+ }
+ Collections.sort(keys);
+
+ // insert keys into btree
+ for (int i = 0; i < keys.size(); i++) {
+
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(keys.get(i).intValue(), dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ try {
btree.insert(tuple, insertOpCtx);
- } catch (BTreeException e) {
+ } catch (BTreeException e) {
} catch (Exception e) {
- e.printStackTrace();
- }
- }
-
- //btree.printTree(leafFrame, interiorFrame, recDescSers);
-
+ e.printStackTrace();
+ }
+ }
+
+ // btree.printTree(leafFrame, interiorFrame, recDescSers);
+
int minSearchKey = -100;
int maxSearchKey = 100;
-
- //System.out.println("STARTING SEARCH TESTS");
-
+
+ // System.out.println("STARTING SEARCH TESTS");
+
// forward searches
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, true, false);
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, false, true, false);
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false, false);
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, true, false);
-
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, true, false);
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, false, true, false);
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false, false);
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, true, false);
+
// backward searches
performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, true, false);
performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, false, true, false);
performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, false, false);
performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, true, false);
-
- btree.close();
+ btree.close();
+ bufferCache.closeFile(fileId);
bufferCache.close();
- fileManager.close();
- }
-
+ }
+
@Test
public void nonUniqueFieldPrefixIndexTest() throws Exception {
-
- System.out.println("TESTING RANGE SEARCH CURSOR ON NONUNIQUE FIELD-PREFIX COMPRESSED INDEX");
-
- IBTreeLeafFrameFactory leafFrameFactory = new FieldPrefixNSMLeafFrameFactory(tupleWriterFactory);
- IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
-
- FileManager fileManager = new FileManager();
- ICacheMemoryAllocator allocator = new BufferAllocator();
- IPageReplacementStrategy prs = new ClockPageReplacementStrategy();
- IBufferCache bufferCache = new BufferCache(allocator, prs, fileManager, PAGE_SIZE, NUM_PAGES);
-
- File f = new File(tmpDir + "/" + "btreetest.bin");
- RandomAccessFile raf = new RandomAccessFile(f, "rw");
- int fileId = 0;
- FileInfo fi = new FileInfo(fileId, raf);
- fileManager.registerFile(fi);
-
+
+ System.out.println("TESTING RANGE SEARCH CURSOR ON NONUNIQUE FIELD-PREFIX COMPRESSED INDEX");
+
+ IBTreeLeafFrameFactory leafFrameFactory = new FieldPrefixNSMLeafFrameFactory(tupleWriterFactory);
+ IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
+
+ DummySMI smi = new DummySMI(PAGE_SIZE, NUM_PAGES);
+ IBufferCache bufferCache = smi.getBufferCache();
+ IFileMapProvider fmp = smi.getFileMapProvider();
+ String fileName = tmpDir + "/" + "btreetest.bin";
+ bufferCache.createFile(fileName);
+ int fileId = fmp.lookupFileId(fileName);
+ bufferCache.openFile(fileId);
+
// declare keys
int keyFieldCount = 2;
IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-
+
MultiComparator cmp = new MultiComparator(typeTraits, cmps);
-
+
BTree btree = new BTree(bufferCache, interiorFrameFactory, leafFrameFactory, cmp);
btree.create(fileId, leafFrame, metaFrame);
- btree.open(fileId);
-
+ btree.open(fileId);
+
ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
- DataOutput dos = tb.getDataOutput();
-
- BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT, leafFrame, interiorFrame, metaFrame);
-
- // generate keys
- int numKeys = 50;
- int maxKey = 10;
- ArrayList<Integer> keys = new ArrayList<Integer>();
- for(int i = 0; i < numKeys; i++) {
- int k = rnd.nextInt() % maxKey;
- keys.add(k);
- }
- Collections.sort(keys);
-
- // insert keys into btree
- for (int i = 0; i < keys.size(); i++) {
-
- tb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(keys.get(i).intValue(), dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
- tb.addFieldEndOffset();
-
- appender.reset(frame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
- tuple.reset(accessor, 0);
-
- try {
+ DataOutput dos = tb.getDataOutput();
+
+ BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT, leafFrame, interiorFrame, metaFrame);
+
+ // generate keys
+ int numKeys = 50;
+ int maxKey = 10;
+ ArrayList<Integer> keys = new ArrayList<Integer>();
+ for (int i = 0; i < numKeys; i++) {
+ int k = rnd.nextInt() % maxKey;
+ keys.add(k);
+ }
+ Collections.sort(keys);
+
+ // insert keys into btree
+ for (int i = 0; i < keys.size(); i++) {
+
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(keys.get(i).intValue(), dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ try {
btree.insert(tuple, insertOpCtx);
- } catch (BTreeException e) {
+ } catch (BTreeException e) {
} catch (Exception e) {
- e.printStackTrace();
- }
- }
-
- //btree.printTree(leafFrame, interiorFrame, recDescSers);
-
+ e.printStackTrace();
+ }
+ }
+
+ // btree.printTree(leafFrame, interiorFrame, recDescSers);
+
int minSearchKey = -100;
int maxSearchKey = 100;
-
- //System.out.println("STARTING SEARCH TESTS");
-
+
+ // System.out.println("STARTING SEARCH TESTS");
+
// forward searches
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, true, false);
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, false, true, false);
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false, false);
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, true, false);
-
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, true, false);
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, false, true, false);
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false, false);
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, true, false);
+
// backward searches
performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, true, false);
performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, false, true, false);
performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, false, false);
performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, true, false);
-
- btree.close();
+ btree.close();
+ bufferCache.closeFile(fileId);
bufferCache.close();
- fileManager.close();
- }
-
- public RangePredicate createRangePredicate(int lk, int hk, boolean isForward, boolean lowKeyInclusive, boolean highKeyInclusive, MultiComparator cmp, ITypeTrait[] typeTraits) throws HyracksDataException {
- // build low and high keys
- ArrayTupleBuilder ktb = new ArrayTupleBuilder(1);
- DataOutput kdos = ktb.getDataOutput();
-
- ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE };
- RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
- IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx, keyDesc);
- keyAccessor.reset(frame);
-
- appender.reset(frame, true);
-
- // build and append low key
- ktb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(lk, kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
-
- // build and append high key
- ktb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(hk, kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
-
- // create tuplereferences for search keys
- FrameTupleReference lowKey = new FrameTupleReference();
- lowKey.reset(keyAccessor, 0);
-
- FrameTupleReference highKey = new FrameTupleReference();
- highKey.reset(keyAccessor, 1);
-
+ }
+
+ public RangePredicate createRangePredicate(int lk, int hk, boolean isForward, boolean lowKeyInclusive,
+ boolean highKeyInclusive, MultiComparator cmp, ITypeTrait[] typeTraits) throws HyracksDataException {
+ // build low and high keys
+ ArrayTupleBuilder ktb = new ArrayTupleBuilder(1);
+ DataOutput kdos = ktb.getDataOutput();
+
+ ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
+ IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx, keyDesc);
+ keyAccessor.reset(frame);
+
+ appender.reset(frame, true);
+
+ // build and append low key
+ ktb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(lk, kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
+
+ // build and append high key
+ ktb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(hk, kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
+
+ // create tuplereferences for search keys
+ FrameTupleReference lowKey = new FrameTupleReference();
+ lowKey.reset(keyAccessor, 0);
+
+ FrameTupleReference highKey = new FrameTupleReference();
+ highKey.reset(keyAccessor, 1);
+
IBinaryComparator[] searchCmps = new IBinaryComparator[1];
searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
-
- RangePredicate rangePred = new RangePredicate(isForward, lowKey, highKey, lowKeyInclusive, highKeyInclusive, searchCmp, searchCmp);
+
+ RangePredicate rangePred = new RangePredicate(isForward, lowKey, highKey, lowKeyInclusive, highKeyInclusive,
+ searchCmp, searchCmp);
return rangePred;
}
-
- public void getExpectedResults(ArrayList<Integer> expectedResults, ArrayList<Integer> keys, int lk, int hk, boolean isForward, boolean lowKeyInclusive, boolean highKeyInclusive) {
-
- // special cases
- if(lk == hk && (!lowKeyInclusive || !highKeyInclusive)) return;
- if(lk > hk) return;
-
- if(isForward) {
- for(int i = 0; i < keys.size(); i++) {
- if( (lk == keys.get(i) && lowKeyInclusive) || (hk == keys.get(i) && highKeyInclusive) ) {
- expectedResults.add(keys.get(i));
- continue;
- }
-
- if(lk < keys.get(i) && hk > keys.get(i)) {
- expectedResults.add(keys.get(i));
- continue;
- }
- }
- }
- else {
- for(int i = keys.size() - 1; i >= 0; i--) {
- if( (lk == keys.get(i) && lowKeyInclusive) || (hk == keys.get(i) && highKeyInclusive) ) {
- expectedResults.add(keys.get(i));
- continue;
- }
-
- if(lk < keys.get(i) && hk > keys.get(i)) {
- expectedResults.add(keys.get(i));
- continue;
- }
- }
- }
- }
-
- public boolean performSearches(ArrayList<Integer> keys, BTree btree, IBTreeLeafFrame leafFrame, IBTreeInteriorFrame interiorFrame, int minKey, int maxKey, boolean isForward, boolean lowKeyInclusive, boolean highKeyInclusive, boolean printExpectedResults) throws Exception {
-
- ArrayList<Integer> results = new ArrayList<Integer>();
- ArrayList<Integer> expectedResults = new ArrayList<Integer>();
-
- for(int i = minKey; i < maxKey; i++) {
- for(int j = minKey; j < maxKey; j++) {
-
- //if(i != -100 || j != 1) continue;
-
- results.clear();
- expectedResults.clear();
-
- int lowKey = i;
- int highKey = j;
- IBTreeCursor rangeCursor = new RangeSearchCursor(leafFrame);
- RangePredicate rangePred = createRangePredicate(lowKey, highKey, isForward, lowKeyInclusive, highKeyInclusive, btree.getMultiComparator(), btree.getMultiComparator().getTypeTraits());
- BTreeOpContext searchOpCtx = btree.createOpContext(BTreeOp.BTO_SEARCH, leafFrame, interiorFrame, null);
- btree.search(rangeCursor, rangePred, searchOpCtx);
+ public void getExpectedResults(ArrayList<Integer> expectedResults, ArrayList<Integer> keys, int lk, int hk,
+ boolean isForward, boolean lowKeyInclusive, boolean highKeyInclusive) {
- try {
- while (rangeCursor.hasNext()) {
- rangeCursor.next();
- ITupleReference frameTuple = rangeCursor.getTuple();
- ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(0), frameTuple.getFieldStart(0), frameTuple.getFieldLength(0));
- DataInput dataIn = new DataInputStream(inStream);
- Integer res = IntegerSerializerDeserializer.INSTANCE.deserialize(dataIn);
- results.add(res);
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- rangeCursor.close();
- }
-
- getExpectedResults(expectedResults, keys, lowKey, highKey, isForward, lowKeyInclusive, highKeyInclusive);
-
- if(printExpectedResults) {
- if(expectedResults.size() > 0) {
- char l, u;
-
- if(lowKeyInclusive) l = '[';
- else l = '(';
-
- if(highKeyInclusive) u = ']';
- else u = ')';
-
- System.out.println("RANGE: " + l + " " + lowKey + " , " + highKey + " " + u);
- for(Integer r : expectedResults)
- System.out.print(r + " ");
- System.out.println();
- }
- }
-
- if(results.size() == expectedResults.size()) {
- for(int k = 0; k < results.size(); k++) {
- if(!results.get(k).equals(expectedResults.get(k))) {
- System.out.println("DIFFERENT RESULTS AT: i=" + i + " j=" + j + " k=" + k);
- System.out.println(results.get(k) + " " + expectedResults.get(k));
- return false;
- }
- }
- }
- else {
- System.out.println("UNEQUAL NUMBER OF RESULTS AT: i=" + i + " j=" + j);
- System.out.println("RESULTS: " + results.size());
- System.out.println("EXPECTED RESULTS: " + expectedResults.size());
- return false;
- }
- }
- }
-
- return true;
+ // special cases
+ if (lk == hk && (!lowKeyInclusive || !highKeyInclusive))
+ return;
+ if (lk > hk)
+ return;
+
+ if (isForward) {
+ for (int i = 0; i < keys.size(); i++) {
+ if ((lk == keys.get(i) && lowKeyInclusive) || (hk == keys.get(i) && highKeyInclusive)) {
+ expectedResults.add(keys.get(i));
+ continue;
+ }
+
+ if (lk < keys.get(i) && hk > keys.get(i)) {
+ expectedResults.add(keys.get(i));
+ continue;
+ }
+ }
+ } else {
+ for (int i = keys.size() - 1; i >= 0; i--) {
+ if ((lk == keys.get(i) && lowKeyInclusive) || (hk == keys.get(i) && highKeyInclusive)) {
+ expectedResults.add(keys.get(i));
+ continue;
+ }
+
+ if (lk < keys.get(i) && hk > keys.get(i)) {
+ expectedResults.add(keys.get(i));
+ continue;
+ }
+ }
+ }
}
-
+
+ public boolean performSearches(ArrayList<Integer> keys, BTree btree, IBTreeLeafFrame leafFrame,
+ IBTreeInteriorFrame interiorFrame, int minKey, int maxKey, boolean isForward, boolean lowKeyInclusive,
+ boolean highKeyInclusive, boolean printExpectedResults) throws Exception {
+
+ ArrayList<Integer> results = new ArrayList<Integer>();
+ ArrayList<Integer> expectedResults = new ArrayList<Integer>();
+
+ for (int i = minKey; i < maxKey; i++) {
+ for (int j = minKey; j < maxKey; j++) {
+
+ // if(i != -100 || j != 1) continue;
+
+ results.clear();
+ expectedResults.clear();
+
+ int lowKey = i;
+ int highKey = j;
+
+ IBTreeCursor rangeCursor = new RangeSearchCursor(leafFrame);
+ RangePredicate rangePred = createRangePredicate(lowKey, highKey, isForward, lowKeyInclusive,
+ highKeyInclusive, btree.getMultiComparator(), btree.getMultiComparator().getTypeTraits());
+ BTreeOpContext searchOpCtx = btree.createOpContext(BTreeOp.BTO_SEARCH, leafFrame, interiorFrame, null);
+ btree.search(rangeCursor, rangePred, searchOpCtx);
+
+ try {
+ while (rangeCursor.hasNext()) {
+ rangeCursor.next();
+ ITupleReference frameTuple = rangeCursor.getTuple();
+ ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(0), frameTuple
+ .getFieldStart(0), frameTuple.getFieldLength(0));
+ DataInput dataIn = new DataInputStream(inStream);
+ Integer res = IntegerSerializerDeserializer.INSTANCE.deserialize(dataIn);
+ results.add(res);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ rangeCursor.close();
+ }
+
+ getExpectedResults(expectedResults, keys, lowKey, highKey, isForward, lowKeyInclusive, highKeyInclusive);
+
+ if (printExpectedResults) {
+ if (expectedResults.size() > 0) {
+ char l, u;
+
+ if (lowKeyInclusive)
+ l = '[';
+ else
+ l = '(';
+
+ if (highKeyInclusive)
+ u = ']';
+ else
+ u = ')';
+
+ System.out.println("RANGE: " + l + " " + lowKey + " , " + highKey + " " + u);
+ for (Integer r : expectedResults)
+ System.out.print(r + " ");
+ System.out.println();
+ }
+ }
+
+ if (results.size() == expectedResults.size()) {
+ for (int k = 0; k < results.size(); k++) {
+ if (!results.get(k).equals(expectedResults.get(k))) {
+ System.out.println("DIFFERENT RESULTS AT: i=" + i + " j=" + j + " k=" + k);
+ System.out.println(results.get(k) + " " + expectedResults.get(k));
+ return false;
+ }
+ }
+ } else {
+ System.out.println("UNEQUAL NUMBER OF RESULTS AT: i=" + i + " j=" + j);
+ System.out.println("RESULTS: " + results.size());
+ System.out.println("EXPECTED RESULTS: " + expectedResults.size());
+ return false;
+ }
+ }
+ }
+
+ return true;
+ }
+
@After
public void tearDown() {
- }
+ }
}
diff --git a/hyracks/hyracks-storage-am-btree/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StorageManagerTest.java b/hyracks/hyracks-storage-am-btree/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StorageManagerTest.java
new file mode 100644
index 0000000..b728ba0
--- /dev/null
+++ b/hyracks/hyracks-storage-am-btree/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StorageManagerTest.java
@@ -0,0 +1,253 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Random;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.btree.api.DummySMI;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+import edu.uci.ics.hyracks.storage.common.file.FileHandle;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.storage.common.sync.LatchType;
+
+public class StorageManagerTest {
+ private static final int PAGE_SIZE = 256;
+ private static final int NUM_PAGES = 10;
+
+ private String tmpDir = System.getProperty("java.io.tmpdir");
+
+ public class PinnedLatchedPage {
+ public final ICachedPage page;
+ public final LatchType latch;
+ public final int pageId;
+
+ public PinnedLatchedPage(ICachedPage page, int pageId, LatchType latch) {
+ this.page = page;
+ this.pageId = pageId;
+ this.latch = latch;
+ }
+ }
+
+ public enum FileAccessType {
+ FTA_READONLY, FTA_WRITEONLY, FTA_MIXED, FTA_UNLATCHED
+ }
+
+ public class FileAccessWorker implements Runnable {
+ private int workerId;
+ private final IBufferCache bufferCache;
+ private final int maxPages;
+ private final int fileId;
+ private final long thinkTime;
+ private final int maxLoopCount;
+ private final int maxPinnedPages;
+ private final int closeFileChance;
+ private final FileAccessType fta;
+ private int loopCount = 0;
+ private boolean fileIsOpen = false;
+ private Random rnd = new Random(50);
+ private List<PinnedLatchedPage> pinnedPages = new LinkedList<PinnedLatchedPage>();
+
+ public FileAccessWorker(int workerId, IBufferCache bufferCache, FileAccessType fta, int fileId, int maxPages,
+ int maxPinnedPages, int maxLoopCount, int closeFileChance, long thinkTime) {
+ this.bufferCache = bufferCache;
+ this.fileId = fileId;
+ this.maxPages = maxPages;
+ this.maxLoopCount = maxLoopCount;
+ this.maxPinnedPages = maxPinnedPages;
+ this.thinkTime = thinkTime;
+ this.closeFileChance = closeFileChance;
+ this.workerId = workerId;
+ this.fta = fta;
+ }
+
+ private void pinRandomPage() {
+ int pageId = Math.abs(rnd.nextInt() % maxPages);
+
+ System.out.println(workerId + " PINNING PAGE: " + pageId);
+
+ try {
+ ICachedPage page = bufferCache.pin(FileHandle.getDiskPageId(fileId, pageId), false);
+ LatchType latch = null;
+
+ switch (fta) {
+
+ case FTA_UNLATCHED: {
+ latch = null;
+ }
+ break;
+
+ case FTA_READONLY: {
+ System.out.println(workerId + " S LATCHING: " + pageId);
+ page.acquireReadLatch();
+ latch = LatchType.LATCH_S;
+ }
+ break;
+
+ case FTA_WRITEONLY: {
+ System.out.println(workerId + " X LATCHING: " + pageId);
+ page.acquireWriteLatch();
+ latch = LatchType.LATCH_X;
+ }
+ break;
+
+ case FTA_MIXED: {
+ if (rnd.nextInt() % 2 == 0) {
+ System.out.println(workerId + " S LATCHING: " + pageId);
+ page.acquireReadLatch();
+ latch = LatchType.LATCH_S;
+ } else {
+ System.out.println(workerId + " X LATCHING: " + pageId);
+ page.acquireWriteLatch();
+ latch = LatchType.LATCH_X;
+ }
+ }
+ break;
+
+ }
+
+ PinnedLatchedPage plPage = new PinnedLatchedPage(page, pageId, latch);
+ pinnedPages.add(plPage);
+ } catch (HyracksDataException e) {
+ e.printStackTrace();
+ }
+ }
+
+ private void unpinRandomPage() {
+ int index = Math.abs(rnd.nextInt() % pinnedPages.size());
+ try {
+ PinnedLatchedPage plPage = pinnedPages.get(index);
+
+ if (plPage.latch != null) {
+ if (plPage.latch == LatchType.LATCH_S) {
+ System.out.println(workerId + " S UNLATCHING: " + plPage.pageId);
+ plPage.page.releaseReadLatch();
+ } else {
+ System.out.println(workerId + " X UNLATCHING: " + plPage.pageId);
+ plPage.page.releaseWriteLatch();
+ }
+ }
+ System.out.println(workerId + " UNPINNING PAGE: " + plPage.pageId);
+
+ bufferCache.unpin(plPage.page);
+ pinnedPages.remove(index);
+ } catch (HyracksDataException e) {
+ e.printStackTrace();
+ }
+ }
+
+ private void openFile() {
+ System.out.println(workerId + " OPENING FILE: " + fileId);
+ try {
+ bufferCache.openFile(fileId);
+ fileIsOpen = true;
+ } catch (HyracksDataException e) {
+ e.printStackTrace();
+ }
+ }
+
+ private void closeFile() {
+ System.out.println(workerId + " CLOSING FILE: " + fileId);
+ try {
+ bufferCache.closeFile(fileId);
+ fileIsOpen = false;
+ } catch (HyracksDataException e) {
+ e.printStackTrace();
+ }
+ }
+
+ @Override
+ public void run() {
+
+ openFile();
+
+ while (loopCount < maxLoopCount) {
+ loopCount++;
+
+ System.out.println(workerId + " LOOP: " + loopCount + "/" + maxLoopCount);
+
+ if (fileIsOpen) {
+
+ // pin some pages
+ int pagesToPin = Math.abs(rnd.nextInt()) % (maxPinnedPages - pinnedPages.size());
+ for (int i = 0; i < pagesToPin; i++) {
+ pinRandomPage();
+ }
+
+ // do some thinking
+ try {
+ Thread.sleep(thinkTime);
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+
+ // unpin some pages
+ if (!pinnedPages.isEmpty()) {
+ int pagesToUnpin = Math.abs(rnd.nextInt()) % pinnedPages.size();
+ for (int i = 0; i < pagesToUnpin; i++) {
+ unpinRandomPage();
+ }
+ }
+
+ // possibly close file
+ int closeFileCheck = Math.abs(rnd.nextInt()) % closeFileChance;
+ if (pinnedPages.isEmpty() || closeFileCheck == 0) {
+ int numPinnedPages = pinnedPages.size();
+ for (int i = 0; i < numPinnedPages; i++) {
+ unpinRandomPage();
+ }
+ closeFile();
+ }
+ } else {
+ openFile();
+ }
+ }
+
+ if (fileIsOpen) {
+ int numPinnedPages = pinnedPages.size();
+ for (int i = 0; i < numPinnedPages; i++) {
+ unpinRandomPage();
+ }
+ closeFile();
+ }
+ }
+ }
+
+ @Test
+ public void oneThreadOneFileTest() throws Exception {
+ DummySMI smi = new DummySMI(PAGE_SIZE, NUM_PAGES);
+ IBufferCache bufferCache = smi.getBufferCache();
+ IFileMapProvider fmp = smi.getFileMapProvider();
+ String fileName = tmpDir + "/" + "testfile01.bin";
+ bufferCache.createFile(fileName);
+ int fileId = fmp.lookupFileId(fileName);
+
+ Thread worker = new Thread(new FileAccessWorker(0, bufferCache, FileAccessType.FTA_UNLATCHED, fileId, 10, 10,
+ 100, 10, 0));
+
+ worker.start();
+
+ worker.join();
+
+ bufferCache.close();
+ }
+
+}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IBinaryTokenizer.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IBinaryTokenizer.java
index b01025b..40cb7da 100644
--- a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IBinaryTokenizer.java
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IBinaryTokenizer.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.invertedindex.api;
import java.io.DataOutput;
@@ -6,15 +21,18 @@
import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
public interface IBinaryTokenizer {
-
- public void reset(byte[] data, int start, int length);
- public boolean hasNext();
- public void next();
-
- public int getTokenStartOff();
- public int getTokenLength();
-
- public void writeToken(DataOutput dos) throws IOException;
-
- public RecordDescriptor getTokenSchema();
+
+ public void reset(byte[] data, int start, int length);
+
+ public boolean hasNext();
+
+ public void next();
+
+ public int getTokenStartOff();
+
+ public int getTokenLength();
+
+ public void writeToken(DataOutput dos) throws IOException;
+
+ public RecordDescriptor getTokenSchema();
}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IBinaryTokenizerFactory.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IBinaryTokenizerFactory.java
index e1072ae..7e91fd4 100644
--- a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IBinaryTokenizerFactory.java
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IBinaryTokenizerFactory.java
@@ -1,7 +1,22 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.invertedindex.api;
import java.io.Serializable;
public interface IBinaryTokenizerFactory extends Serializable {
- public IBinaryTokenizer createBinaryTokenizer();
+ public IBinaryTokenizer createBinaryTokenizer();
}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexResultCursor.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexResultCursor.java
index 378c925a..990936e 100644
--- a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexResultCursor.java
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexResultCursor.java
@@ -1,10 +1,28 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.invertedindex.api;
import java.nio.ByteBuffer;
-public interface IInvertedIndexResultCursor {
- public boolean hasNext();
- public void next();
- public ByteBuffer getBuffer();
- public void reset();
+public interface IInvertedIndexResultCursor {
+ public boolean hasNext();
+
+ public void next();
+
+ public ByteBuffer getBuffer();
+
+ public void reset();
}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexSearcher.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexSearcher.java
index fc2dfd1..98dc3b9 100644
--- a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexSearcher.java
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexSearcher.java
@@ -1,9 +1,24 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.invertedindex.api;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-public interface IInvertedIndexSearcher {
- public void search(ITupleReference queryTuple, int queryFieldIndex) throws HyracksDataException;
- public IInvertedIndexResultCursor getResultCursor();
+public interface IInvertedIndexSearcher {
+ public void search(ITupleReference queryTuple, int queryFieldIndex) throws Exception;
+
+ public IInvertedIndexResultCursor getResultCursor();
}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/BinaryTokenizerOperatorDescriptor.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/BinaryTokenizerOperatorDescriptor.java
index 0d5dbc4..d438685 100644
--- a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/BinaryTokenizerOperatorDescriptor.java
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/BinaryTokenizerOperatorDescriptor.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.invertedindex.dataflow;
import edu.uci.ics.hyracks.api.context.IHyracksContext;
@@ -11,32 +26,30 @@
import edu.uci.ics.hyracks.storage.am.invertedindex.api.IBinaryTokenizerFactory;
public class BinaryTokenizerOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
-
- private static final long serialVersionUID = 1L;
-
- private final IBinaryTokenizerFactory tokenizerFactory;
- // fields that will be tokenized
- private final int[] tokenFields;
- // operator will emit these projected fields for each token, e.g., as payload for an inverted list
- // WARNING: too many projected fields can cause significant data blowup
- private final int[] projFields;
-
- public BinaryTokenizerOperatorDescriptor(JobSpecification spec, RecordDescriptor recDesc, IBinaryTokenizerFactory tokenizerFactory, int[] tokenFields, int[] projFields) {
- super(spec, 1, 1);
- this.tokenizerFactory = tokenizerFactory;
- this.tokenFields = tokenFields;
- this.projFields = projFields;
- recordDescriptors[0] = recDesc;
- }
-
- @Override
- public IOperatorNodePushable createPushRuntime(IHyracksContext ctx,
- IOperatorEnvironment env,
- IRecordDescriptorProvider recordDescProvider, int partition,
- int nPartitions) throws HyracksDataException {
- return new BinaryTokenizerOperatorNodePushable(ctx,
- recordDescProvider.getInputRecordDescriptor(odId, 0),
- recordDescriptors[0], tokenizerFactory.createBinaryTokenizer(),
- tokenFields, projFields);
- }
+
+ private static final long serialVersionUID = 1L;
+
+ private final IBinaryTokenizerFactory tokenizerFactory;
+ // fields that will be tokenized
+ private final int[] tokenFields;
+ // operator will emit these projected fields for each token, e.g., as
+ // payload for an inverted list
+ // WARNING: too many projected fields can cause significant data blowup
+ private final int[] projFields;
+
+ public BinaryTokenizerOperatorDescriptor(JobSpecification spec, RecordDescriptor recDesc,
+ IBinaryTokenizerFactory tokenizerFactory, int[] tokenFields, int[] projFields) {
+ super(spec, 1, 1);
+ this.tokenizerFactory = tokenizerFactory;
+ this.tokenFields = tokenFields;
+ this.projFields = projFields;
+ recordDescriptors[0] = recDesc;
+ }
+
+ @Override
+ public IOperatorNodePushable createPushRuntime(IHyracksContext ctx, IOperatorEnvironment env,
+ IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
+ return new BinaryTokenizerOperatorNodePushable(ctx, recordDescProvider.getInputRecordDescriptor(odId, 0),
+ recordDescriptors[0], tokenizerFactory.createBinaryTokenizer(), tokenFields, projFields);
+ }
}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/BinaryTokenizerOperatorNodePushable.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/BinaryTokenizerOperatorNodePushable.java
index 869b0be..ffa8524 100644
--- a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/BinaryTokenizerOperatorNodePushable.java
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/BinaryTokenizerOperatorNodePushable.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.invertedindex.dataflow;
import java.io.DataOutput;
@@ -16,88 +31,90 @@
public class BinaryTokenizerOperatorNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
- private final IHyracksContext ctx;
- private final IBinaryTokenizer tokenizer;
- private final int[] tokenFields;
- private final int[] projFields;
- private final RecordDescriptor inputRecDesc;
- private final RecordDescriptor outputRecDesc;
-
- private FrameTupleAccessor accessor;
- private ArrayTupleBuilder builder;
- private DataOutput builderDos;
- private FrameTupleAppender appender;
- private ByteBuffer writeBuffer;
-
- public BinaryTokenizerOperatorNodePushable(IHyracksContext ctx, RecordDescriptor inputRecDesc, RecordDescriptor outputRecDesc, IBinaryTokenizer tokenizer, int[] tokenFields, int[] projFields) {
- this.ctx = ctx;
- this.tokenizer = tokenizer;
- this.tokenFields = tokenFields;
- this.projFields = projFields;
- this.inputRecDesc = inputRecDesc;
- this.outputRecDesc = outputRecDesc;
- }
-
- @Override
- public void open() throws HyracksDataException {
- accessor = new FrameTupleAccessor(ctx, inputRecDesc);
+ private final IHyracksContext ctx;
+ private final IBinaryTokenizer tokenizer;
+ private final int[] tokenFields;
+ private final int[] projFields;
+ private final RecordDescriptor inputRecDesc;
+ private final RecordDescriptor outputRecDesc;
+
+ private FrameTupleAccessor accessor;
+ private ArrayTupleBuilder builder;
+ private DataOutput builderDos;
+ private FrameTupleAppender appender;
+ private ByteBuffer writeBuffer;
+
+ public BinaryTokenizerOperatorNodePushable(IHyracksContext ctx, RecordDescriptor inputRecDesc,
+ RecordDescriptor outputRecDesc, IBinaryTokenizer tokenizer, int[] tokenFields, int[] projFields) {
+ this.ctx = ctx;
+ this.tokenizer = tokenizer;
+ this.tokenFields = tokenFields;
+ this.projFields = projFields;
+ this.inputRecDesc = inputRecDesc;
+ this.outputRecDesc = outputRecDesc;
+ }
+
+ @Override
+ public void open() throws HyracksDataException {
+ accessor = new FrameTupleAccessor(ctx, inputRecDesc);
writeBuffer = ctx.getResourceManager().allocateFrame();
builder = new ArrayTupleBuilder(outputRecDesc.getFields().length);
builderDos = builder.getDataOutput();
appender = new FrameTupleAppender(ctx);
appender.reset(writeBuffer, true);
- }
-
- @Override
- public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
- accessor.reset(buffer);
-
+ }
+
+ @Override
+ public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+ accessor.reset(buffer);
+
int tupleCount = accessor.getTupleCount();
for (int i = 0; i < tupleCount; i++) {
-
- for(int j = 0; j < tokenFields.length; j++) {
-
- tokenizer.reset(accessor.getBuffer().array(),
- accessor.getTupleStartOffset(i) + accessor.getFieldSlotsLength() + accessor.getFieldStartOffset(i, tokenFields[j]),
- accessor.getFieldLength(i, tokenFields[j]));
-
- while(tokenizer.hasNext()) {
- tokenizer.next();
-
- builder.reset();
- try {
- tokenizer.writeToken(builderDos);
- builder.addFieldEndOffset();
- } catch (IOException e) {
- throw new HyracksDataException(e.getMessage());
- }
-
- for(int k = 0; k < projFields.length; k++) {
- builder.addField(accessor, i, projFields[k]);
- }
-
- if (!appender.append(builder.getFieldEndOffsets(), builder.getByteArray(), 0, builder.getSize())) {
- FrameUtils.flushFrame(writeBuffer, writer);
- appender.reset(writeBuffer, true);
- if (!appender.append(builder.getFieldEndOffsets(), builder.getByteArray(), 0, builder.getSize())) {
- throw new IllegalStateException();
- }
- }
- }
- }
+
+ for (int j = 0; j < tokenFields.length; j++) {
+
+ tokenizer.reset(accessor.getBuffer().array(), accessor.getTupleStartOffset(i)
+ + accessor.getFieldSlotsLength() + accessor.getFieldStartOffset(i, tokenFields[j]), accessor
+ .getFieldLength(i, tokenFields[j]));
+
+ while (tokenizer.hasNext()) {
+ tokenizer.next();
+
+ builder.reset();
+ try {
+ tokenizer.writeToken(builderDos);
+ builder.addFieldEndOffset();
+ } catch (IOException e) {
+ throw new HyracksDataException(e.getMessage());
+ }
+
+ for (int k = 0; k < projFields.length; k++) {
+ builder.addField(accessor, i, projFields[k]);
+ }
+
+ if (!appender.append(builder.getFieldEndOffsets(), builder.getByteArray(), 0, builder.getSize())) {
+ FrameUtils.flushFrame(writeBuffer, writer);
+ appender.reset(writeBuffer, true);
+ if (!appender
+ .append(builder.getFieldEndOffsets(), builder.getByteArray(), 0, builder.getSize())) {
+ throw new IllegalStateException();
+ }
+ }
+ }
+ }
}
-
+
if (appender.getTupleCount() > 0) {
- FrameUtils.flushFrame(writeBuffer, writer);
- }
- }
-
- @Override
- public void close() throws HyracksDataException {
- writer.close();
- }
-
- @Override
- public void flush() throws HyracksDataException {
- }
+ FrameUtils.flushFrame(writeBuffer, writer);
+ }
+ }
+
+ @Override
+ public void close() throws HyracksDataException {
+ writer.close();
+ }
+
+ @Override
+ public void flush() throws HyracksDataException {
+ }
}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/ListResultCursor.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/ListResultCursor.java
index 88feb23..adc5221 100644
--- a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/ListResultCursor.java
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/ListResultCursor.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.invertedindex.impls;
import java.nio.ByteBuffer;
@@ -7,34 +22,36 @@
public class ListResultCursor implements IInvertedIndexResultCursor {
- private List<ByteBuffer> resultBuffers;
- private int numResultBuffers;
- private int currentPos = 0;
-
- public void setResults(List<ByteBuffer> resultBuffers, int numResultBuffers) {
- this.resultBuffers = resultBuffers;
- this.numResultBuffers = numResultBuffers;
- reset();
- }
-
- @Override
- public boolean hasNext() {
- if(currentPos+1 < numResultBuffers) return true;
- else return false;
- }
+ private List<ByteBuffer> resultBuffers;
+ private int numResultBuffers;
+ private int currentPos = 0;
- @Override
- public void next() {
- currentPos++;
- }
-
- @Override
- public ByteBuffer getBuffer() {
- return resultBuffers.get(currentPos);
- }
-
- @Override
- public void reset() {
- currentPos = -1;
- }
+ public void setResults(List<ByteBuffer> resultBuffers, int numResultBuffers) {
+ this.resultBuffers = resultBuffers;
+ this.numResultBuffers = numResultBuffers;
+ reset();
+ }
+
+ @Override
+ public boolean hasNext() {
+ if (currentPos + 1 < numResultBuffers)
+ return true;
+ else
+ return false;
+ }
+
+ @Override
+ public void next() {
+ currentPos++;
+ }
+
+ @Override
+ public ByteBuffer getBuffer() {
+ return resultBuffers.get(currentPos);
+ }
+
+ @Override
+ public void reset() {
+ currentPos = -1;
+ }
}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/SimpleConjunctiveSearcher.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/SimpleConjunctiveSearcher.java
index b6e3547..6792b0f 100644
--- a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/SimpleConjunctiveSearcher.java
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/SimpleConjunctiveSearcher.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.invertedindex.impls;
import java.io.DataOutput;
@@ -31,240 +46,250 @@
import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearcher;
public class SimpleConjunctiveSearcher implements IInvertedIndexSearcher {
-
- private final int numKeyFields;
- private final int numValueFields;
-
- private final IBinaryComparator[] keyCmps;
- private final IBinaryComparator[] valueCmps;
-
- private final BTree btree;
- private final IHyracksContext ctx;
- private final ArrayTupleBuilder resultTupleBuilder;
- private final FrameTupleAppender resultTupleAppender;
- private final FrameTupleAccessor resultFrameAccessor;
-
- private List<ByteBuffer> newResultBuffers = new ArrayList<ByteBuffer>();
- private List<ByteBuffer> prevResultBuffers = new ArrayList<ByteBuffer>();
- private List<ByteBuffer> swap = null;
- private final ListResultCursor resultCursor = new ListResultCursor();
- private int maxResultBufIdx = 0;
-
- private final IBTreeLeafFrame leafFrame;
- private final IBTreeInteriorFrame interiorFrame;
- private final IBTreeCursor btreeCursor;
- private final FrameTupleReference searchKey = new FrameTupleReference();
- private final RangePredicate pred = new RangePredicate(true, null, null, true, true, null, null);
-
- private final IBinaryTokenizer queryTokenizer;
-
- public SimpleConjunctiveSearcher(IHyracksContext ctx, BTree btree, RecordDescriptor btreeRecDesc, IBinaryTokenizer queryTokenizer, int numKeyFields, int numValueFields) {
- this.ctx = ctx;
- this.btree = btree;
- this.queryTokenizer = queryTokenizer;
- this.numKeyFields = numKeyFields;
- this.numValueFields = numValueFields;
-
- leafFrame = btree.getLeafFrameFactory().getFrame();
- interiorFrame = btree.getInteriorFrameFactory().getFrame();
- btreeCursor = new RangeSearchCursor(leafFrame);
- resultTupleAppender = new FrameTupleAppender(ctx);
- resultTupleBuilder = new ArrayTupleBuilder(numValueFields);
- newResultBuffers.add(ctx.getResourceManager().allocateFrame());
- prevResultBuffers.add(ctx.getResourceManager().allocateFrame());
- MultiComparator btreeCmp = btree.getMultiComparator();
-
- keyCmps = new IBinaryComparator[numKeyFields];
- for(int i = 0; i < numKeyFields; i++) {
- keyCmps[i] = btreeCmp.getComparators()[i];
- }
-
- valueCmps = new IBinaryComparator[numValueFields];
- for(int i = 0; i < numValueFields; i++) {
- valueCmps[i] = btreeCmp.getComparators()[numKeyFields + i];
- }
-
- MultiComparator searchCmp = new MultiComparator(btreeCmp.getTypeTraits(), keyCmps);
- pred.setLowKeyComparator(searchCmp);
- pred.setHighKeyComparator(searchCmp);
- pred.setLowKey(searchKey, true);
- pred.setHighKey(searchKey, true);
-
- ISerializerDeserializer[] valueSerde = new ISerializerDeserializer[numValueFields];
- for(int i = 0; i < numValueFields; i++) {
- valueSerde[i] = btreeRecDesc.getFields()[numKeyFields + i];
-
- }
- RecordDescriptor valueRecDesc = new RecordDescriptor(valueSerde);
- resultFrameAccessor = new FrameTupleAccessor(ctx, valueRecDesc);
- }
-
- public void search(ITupleReference queryTuple, int queryFieldIndex) throws HyracksDataException {
-
- // parse query, TODO: this parsing is too simple
- RecordDescriptor queryTokenRecDesc = new RecordDescriptor(new ISerializerDeserializer[] { UTF8StringSerializerDeserializer.INSTANCE } );
-
- ArrayTupleBuilder queryTokenBuilder = new ArrayTupleBuilder(queryTokenRecDesc.getFields().length);
- DataOutput queryTokenDos = queryTokenBuilder.getDataOutput();
- FrameTupleAppender queryTokenAppender = new FrameTupleAppender(ctx);
- ByteBuffer queryTokenFrame = ctx.getResourceManager().allocateFrame();
- queryTokenAppender.reset(queryTokenFrame, true);
-
- queryTokenizer.reset(queryTuple.getFieldData(queryFieldIndex), queryTuple.getFieldStart(queryFieldIndex), queryTuple.getFieldLength(queryFieldIndex));
- while(queryTokenizer.hasNext()) {
- queryTokenizer.next();
-
- queryTokenBuilder.reset();
- try {
- queryTokenizer.writeToken(queryTokenDos);
- queryTokenBuilder.addFieldEndOffset();
- } catch (IOException e) {
- throw new HyracksDataException(e);
- }
-
- // WARNING: assuming one frame is enough to hold all tokens
- queryTokenAppender.append(queryTokenBuilder.getFieldEndOffsets(), queryTokenBuilder.getByteArray(), 0, queryTokenBuilder.getSize());
- }
-
- FrameTupleAccessor queryTokenAccessor = new FrameTupleAccessor(ctx, queryTokenRecDesc);
- queryTokenAccessor.reset(queryTokenFrame);
- int numQueryTokens = queryTokenAccessor.getTupleCount();
-
- maxResultBufIdx = 0;
-
- BTreeOpContext opCtx = btree.createOpContext(BTreeOp.BTO_SEARCH, leafFrame, interiorFrame, null);
-
- resultTupleAppender.reset(newResultBuffers.get(0), true);
- try {
- // append first inverted list to temporary results
- searchKey.reset(queryTokenAccessor, 0);
- btree.search(btreeCursor, pred, opCtx);
- while(btreeCursor.hasNext()) {
- btreeCursor.next();
- maxResultBufIdx = appendTupleToNewResults(btreeCursor, maxResultBufIdx);
- }
- btreeCursor.reset();
- } catch (Exception e) {
- throw new HyracksDataException(e);
- }
-
- resultFrameAccessor.reset(newResultBuffers.get(0));
-
- // intersect temporary results with remaining inverted lists
- for(int i = 1; i < numQueryTokens; i++) {
- swap = prevResultBuffers;
- prevResultBuffers = newResultBuffers;
- newResultBuffers = swap;
- try {
- searchKey.reset(queryTokenAccessor, i);
- btree.search(btreeCursor, pred, opCtx);
- maxResultBufIdx = intersectList(btreeCursor, prevResultBuffers, maxResultBufIdx, newResultBuffers);
- } catch (Exception e) {
- throw new HyracksDataException(e);
- }
- btreeCursor.reset();
- }
- }
-
- private int appendTupleToNewResults(IBTreeCursor btreeCursor, int newBufIdx) throws IOException {
- ByteBuffer newCurrentBuffer = newResultBuffers.get(newBufIdx);
-
- ITupleReference tuple = btreeCursor.getTuple();
- resultTupleBuilder.reset();
- DataOutput dos = resultTupleBuilder.getDataOutput();
- for(int i = 0; i < numValueFields; i++) {
- int fIdx = numKeyFields + i;
- dos.write(tuple.getFieldData(fIdx), tuple.getFieldStart(fIdx), tuple.getFieldLength(fIdx));
- resultTupleBuilder.addFieldEndOffset();
- }
-
- if (!resultTupleAppender.append(resultTupleBuilder.getFieldEndOffsets(), resultTupleBuilder.getByteArray(), 0, resultTupleBuilder.getSize())) {
- newBufIdx++;
- if(newBufIdx >= newResultBuffers.size()) {
- newResultBuffers.add(ctx.getResourceManager().allocateFrame());
- }
- newCurrentBuffer = newResultBuffers.get(newBufIdx);
- resultTupleAppender.reset(newCurrentBuffer, true);
- if (!resultTupleAppender.append(resultTupleBuilder.getFieldEndOffsets(), resultTupleBuilder.getByteArray(), 0, resultTupleBuilder.getSize())) {
- throw new IllegalStateException();
- }
- }
-
- return newBufIdx;
- }
-
- private int intersectList(IBTreeCursor btreeCursor, List<ByteBuffer> prevResultBuffers, int maxPrevBufIdx, List<ByteBuffer> newResultBuffers) throws IOException, Exception {
-
- int newBufIdx = 0;
- ByteBuffer newCurrentBuffer = newResultBuffers.get(0);
-
- int prevBufIdx = 0;
- ByteBuffer prevCurrentBuffer = prevResultBuffers.get(0);
-
- resultTupleBuilder.reset();
- resultTupleAppender.reset(newCurrentBuffer, true);
- resultFrameAccessor.reset(prevCurrentBuffer);
-
- // WARNING: not very efficient but good enough for the first cut
- boolean advanceCursor = true;
- boolean advancePrevResult = false;
- int resultTidx = 0;
-
- while( (!advanceCursor || btreeCursor.hasNext()) && prevBufIdx <= maxPrevBufIdx && resultTidx < resultFrameAccessor.getTupleCount()) {
-
- if(advanceCursor) btreeCursor.next();
- ITupleReference tuple = btreeCursor.getTuple();
-
- int cmp = 0;
- for(int i = 0; i < valueCmps.length; i++) {
- int tupleFidx = numKeyFields + i;
- cmp = valueCmps[i].compare(tuple.getFieldData(tupleFidx),
- tuple.getFieldStart(tupleFidx),
- tuple.getFieldLength(tupleFidx),
- resultFrameAccessor.getBuffer().array(),
- resultFrameAccessor.getTupleStartOffset(resultTidx) + resultFrameAccessor.getFieldSlotsLength() + resultFrameAccessor.getFieldStartOffset(resultTidx, i),
- resultFrameAccessor.getFieldLength(resultTidx, i));
- if(cmp != 0) break;
- }
-
- // match found
- if(cmp == 0) {
- newBufIdx = appendTupleToNewResults(btreeCursor, newBufIdx);
-
- advanceCursor = true;
- advancePrevResult = true;
- }
- else {
- if(cmp < 0) {
- advanceCursor = true;
- advancePrevResult = false;
- }
- else {
- advanceCursor = false;
- advancePrevResult = true;
- }
- }
-
- if(advancePrevResult) {
- resultTidx++;
- if(resultTidx >= resultFrameAccessor.getTupleCount()) {
- prevBufIdx++;
- if(prevBufIdx <= maxPrevBufIdx) {
- prevCurrentBuffer = prevResultBuffers.get(prevBufIdx);
- resultFrameAccessor.reset(prevCurrentBuffer);
- resultTidx = 0;
- }
- }
- }
- }
-
- return newBufIdx;
- }
+ private final int numKeyFields;
+ private final int numValueFields;
- @Override
- public IInvertedIndexResultCursor getResultCursor() {
- resultCursor.setResults(newResultBuffers, maxResultBufIdx + 1);
- return resultCursor;
- }
+ private final IBinaryComparator[] keyCmps;
+ private final IBinaryComparator[] valueCmps;
+
+ private final BTree btree;
+ private final IHyracksContext ctx;
+ private final ArrayTupleBuilder resultTupleBuilder;
+ private final FrameTupleAppender resultTupleAppender;
+ private final FrameTupleAccessor resultFrameAccessor;
+
+ private List<ByteBuffer> newResultBuffers = new ArrayList<ByteBuffer>();
+ private List<ByteBuffer> prevResultBuffers = new ArrayList<ByteBuffer>();
+ private List<ByteBuffer> swap = null;
+ private final ListResultCursor resultCursor = new ListResultCursor();
+ private int maxResultBufIdx = 0;
+
+ private final IBTreeLeafFrame leafFrame;
+ private final IBTreeInteriorFrame interiorFrame;
+ private final IBTreeCursor btreeCursor;
+ private final FrameTupleReference searchKey = new FrameTupleReference();
+ private final RangePredicate pred = new RangePredicate(true, null, null, true, true, null, null);
+
+ private final IBinaryTokenizer queryTokenizer;
+
+ public SimpleConjunctiveSearcher(IHyracksContext ctx, BTree btree, RecordDescriptor btreeRecDesc,
+ IBinaryTokenizer queryTokenizer, int numKeyFields, int numValueFields) {
+ this.ctx = ctx;
+ this.btree = btree;
+ this.queryTokenizer = queryTokenizer;
+ this.numKeyFields = numKeyFields;
+ this.numValueFields = numValueFields;
+
+ leafFrame = btree.getLeafFrameFactory().getFrame();
+ interiorFrame = btree.getInteriorFrameFactory().getFrame();
+ btreeCursor = new RangeSearchCursor(leafFrame);
+ resultTupleAppender = new FrameTupleAppender(ctx);
+ resultTupleBuilder = new ArrayTupleBuilder(numValueFields);
+ newResultBuffers.add(ctx.getResourceManager().allocateFrame());
+ prevResultBuffers.add(ctx.getResourceManager().allocateFrame());
+
+ MultiComparator btreeCmp = btree.getMultiComparator();
+
+ keyCmps = new IBinaryComparator[numKeyFields];
+ for (int i = 0; i < numKeyFields; i++) {
+ keyCmps[i] = btreeCmp.getComparators()[i];
+ }
+
+ valueCmps = new IBinaryComparator[numValueFields];
+ for (int i = 0; i < numValueFields; i++) {
+ valueCmps[i] = btreeCmp.getComparators()[numKeyFields + i];
+ }
+
+ MultiComparator searchCmp = new MultiComparator(btreeCmp.getTypeTraits(), keyCmps);
+ pred.setLowKeyComparator(searchCmp);
+ pred.setHighKeyComparator(searchCmp);
+ pred.setLowKey(searchKey, true);
+ pred.setHighKey(searchKey, true);
+
+ ISerializerDeserializer[] valueSerde = new ISerializerDeserializer[numValueFields];
+ for (int i = 0; i < numValueFields; i++) {
+ valueSerde[i] = btreeRecDesc.getFields()[numKeyFields + i];
+
+ }
+ RecordDescriptor valueRecDesc = new RecordDescriptor(valueSerde);
+ resultFrameAccessor = new FrameTupleAccessor(ctx, valueRecDesc);
+ }
+
+ public void search(ITupleReference queryTuple, int queryFieldIndex) throws Exception {
+
+ // parse query, TODO: this parsing is too simple
+ RecordDescriptor queryTokenRecDesc = new RecordDescriptor(
+ new ISerializerDeserializer[] { UTF8StringSerializerDeserializer.INSTANCE });
+
+ ArrayTupleBuilder queryTokenBuilder = new ArrayTupleBuilder(queryTokenRecDesc.getFields().length);
+ DataOutput queryTokenDos = queryTokenBuilder.getDataOutput();
+ FrameTupleAppender queryTokenAppender = new FrameTupleAppender(ctx);
+ ByteBuffer queryTokenFrame = ctx.getResourceManager().allocateFrame();
+ queryTokenAppender.reset(queryTokenFrame, true);
+
+ queryTokenizer.reset(queryTuple.getFieldData(queryFieldIndex), queryTuple.getFieldStart(queryFieldIndex),
+ queryTuple.getFieldLength(queryFieldIndex));
+ while (queryTokenizer.hasNext()) {
+ queryTokenizer.next();
+
+ queryTokenBuilder.reset();
+ try {
+ queryTokenizer.writeToken(queryTokenDos);
+ queryTokenBuilder.addFieldEndOffset();
+ } catch (IOException e) {
+ throw new HyracksDataException(e);
+ }
+
+ // WARNING: assuming one frame is enough to hold all tokens
+ queryTokenAppender.append(queryTokenBuilder.getFieldEndOffsets(), queryTokenBuilder.getByteArray(), 0,
+ queryTokenBuilder.getSize());
+ }
+
+ FrameTupleAccessor queryTokenAccessor = new FrameTupleAccessor(ctx, queryTokenRecDesc);
+ queryTokenAccessor.reset(queryTokenFrame);
+ int numQueryTokens = queryTokenAccessor.getTupleCount();
+
+ maxResultBufIdx = 0;
+
+ BTreeOpContext opCtx = btree.createOpContext(BTreeOp.BTO_SEARCH, leafFrame, interiorFrame, null);
+
+ resultTupleAppender.reset(newResultBuffers.get(0), true);
+ try {
+ // append first inverted list to temporary results
+ searchKey.reset(queryTokenAccessor, 0);
+ btree.search(btreeCursor, pred, opCtx);
+ while (btreeCursor.hasNext()) {
+ btreeCursor.next();
+ maxResultBufIdx = appendTupleToNewResults(btreeCursor, maxResultBufIdx);
+ }
+ btreeCursor.close();
+ btreeCursor.reset();
+ } catch (Exception e) {
+ throw new HyracksDataException(e);
+ }
+
+ resultFrameAccessor.reset(newResultBuffers.get(0));
+
+ // intersect temporary results with remaining inverted lists
+ for (int i = 1; i < numQueryTokens; i++) {
+ swap = prevResultBuffers;
+ prevResultBuffers = newResultBuffers;
+ newResultBuffers = swap;
+ try {
+ searchKey.reset(queryTokenAccessor, i);
+ btree.search(btreeCursor, pred, opCtx);
+ maxResultBufIdx = intersectList(btreeCursor, prevResultBuffers, maxResultBufIdx, newResultBuffers);
+ } catch (Exception e) {
+ throw new HyracksDataException(e);
+ }
+ btreeCursor.close();
+ btreeCursor.reset();
+ }
+ }
+
+ private int appendTupleToNewResults(IBTreeCursor btreeCursor, int newBufIdx) throws IOException {
+ ByteBuffer newCurrentBuffer = newResultBuffers.get(newBufIdx);
+
+ ITupleReference tuple = btreeCursor.getTuple();
+ resultTupleBuilder.reset();
+ DataOutput dos = resultTupleBuilder.getDataOutput();
+ for (int i = 0; i < numValueFields; i++) {
+ int fIdx = numKeyFields + i;
+ dos.write(tuple.getFieldData(fIdx), tuple.getFieldStart(fIdx), tuple.getFieldLength(fIdx));
+ resultTupleBuilder.addFieldEndOffset();
+ }
+
+ if (!resultTupleAppender.append(resultTupleBuilder.getFieldEndOffsets(), resultTupleBuilder.getByteArray(), 0,
+ resultTupleBuilder.getSize())) {
+ newBufIdx++;
+ if (newBufIdx >= newResultBuffers.size()) {
+ newResultBuffers.add(ctx.getResourceManager().allocateFrame());
+ }
+ newCurrentBuffer = newResultBuffers.get(newBufIdx);
+ resultTupleAppender.reset(newCurrentBuffer, true);
+ if (!resultTupleAppender.append(resultTupleBuilder.getFieldEndOffsets(), resultTupleBuilder.getByteArray(),
+ 0, resultTupleBuilder.getSize())) {
+ throw new IllegalStateException();
+ }
+ }
+
+ return newBufIdx;
+ }
+
+ private int intersectList(IBTreeCursor btreeCursor, List<ByteBuffer> prevResultBuffers, int maxPrevBufIdx,
+ List<ByteBuffer> newResultBuffers) throws IOException, Exception {
+
+ int newBufIdx = 0;
+ ByteBuffer newCurrentBuffer = newResultBuffers.get(0);
+
+ int prevBufIdx = 0;
+ ByteBuffer prevCurrentBuffer = prevResultBuffers.get(0);
+
+ resultTupleBuilder.reset();
+ resultTupleAppender.reset(newCurrentBuffer, true);
+ resultFrameAccessor.reset(prevCurrentBuffer);
+
+ // WARNING: not very efficient but good enough for the first cut
+ boolean advanceCursor = true;
+ boolean advancePrevResult = false;
+ int resultTidx = 0;
+
+ while ((!advanceCursor || btreeCursor.hasNext()) && prevBufIdx <= maxPrevBufIdx
+ && resultTidx < resultFrameAccessor.getTupleCount()) {
+
+ if (advanceCursor)
+ btreeCursor.next();
+ ITupleReference tuple = btreeCursor.getTuple();
+
+ int cmp = 0;
+ for (int i = 0; i < valueCmps.length; i++) {
+ int tupleFidx = numKeyFields + i;
+ cmp = valueCmps[i].compare(tuple.getFieldData(tupleFidx), tuple.getFieldStart(tupleFidx), tuple
+ .getFieldLength(tupleFidx), resultFrameAccessor.getBuffer().array(), resultFrameAccessor
+ .getTupleStartOffset(resultTidx)
+ + resultFrameAccessor.getFieldSlotsLength()
+ + resultFrameAccessor.getFieldStartOffset(resultTidx, i), resultFrameAccessor.getFieldLength(
+ resultTidx, i));
+ if (cmp != 0)
+ break;
+ }
+
+ // match found
+ if (cmp == 0) {
+ newBufIdx = appendTupleToNewResults(btreeCursor, newBufIdx);
+
+ advanceCursor = true;
+ advancePrevResult = true;
+ } else {
+ if (cmp < 0) {
+ advanceCursor = true;
+ advancePrevResult = false;
+ } else {
+ advanceCursor = false;
+ advancePrevResult = true;
+ }
+ }
+
+ if (advancePrevResult) {
+ resultTidx++;
+ if (resultTidx >= resultFrameAccessor.getTupleCount()) {
+ prevBufIdx++;
+ if (prevBufIdx <= maxPrevBufIdx) {
+ prevCurrentBuffer = prevResultBuffers.get(prevBufIdx);
+ resultFrameAccessor.reset(prevCurrentBuffer);
+ resultTidx = 0;
+ }
+ }
+ }
+ }
+
+ return newBufIdx;
+ }
+
+ @Override
+ public IInvertedIndexResultCursor getResultCursor() {
+ resultCursor.setResults(newResultBuffers, maxResultBufIdx + 1);
+ return resultCursor;
+ }
}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizer.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizer.java
index 9a47280..73635f9 100644
--- a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizer.java
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizer.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
import java.io.DataOutput;
@@ -10,74 +25,76 @@
import edu.uci.ics.hyracks.storage.am.invertedindex.api.IBinaryTokenizer;
public class DelimitedUTF8StringBinaryTokenizer implements IBinaryTokenizer {
-
- private static final RecordDescriptor tokenSchema =
- new RecordDescriptor(new ISerializerDeserializer[] { UTF8StringSerializerDeserializer.INSTANCE } );
-
- private final char delimiter;
- private byte[] data;
- private int start;
- private int length;
-
- private int tokenLength;
- private int tokenStart;
- private int pos;
-
- public DelimitedUTF8StringBinaryTokenizer(char delimiter) {
- this.delimiter = delimiter;
- }
-
- @Override
- public int getTokenLength() {
- return tokenLength;
- }
- @Override
- public int getTokenStartOff() {
- return tokenStart;
- }
+ private static final RecordDescriptor tokenSchema = new RecordDescriptor(
+ new ISerializerDeserializer[] { UTF8StringSerializerDeserializer.INSTANCE });
- @Override
- public boolean hasNext() {
- if(pos >= start + length) return false;
- else return true;
- }
-
- @Override
- public void next() {
- tokenLength = 0;
- tokenStart = pos;
- while(pos < start + length) {
- int len = StringUtils.charSize(data, pos);
- char ch = StringUtils.charAt(data, pos);
- pos += len;
- if(ch == delimiter) {
- break;
- }
- tokenLength += len;
- }
- }
-
- @Override
- public void reset(byte[] data, int start, int length) {
- this.data = data;
- this.start = start;
- this.pos = start;
- this.length = length;
- this.tokenLength = 0;
- this.tokenStart = 0;
+ private final char delimiter;
+ private byte[] data;
+ private int start;
+ private int length;
+
+ private int tokenLength;
+ private int tokenStart;
+ private int pos;
+
+ public DelimitedUTF8StringBinaryTokenizer(char delimiter) {
+ this.delimiter = delimiter;
+ }
+
+ @Override
+ public int getTokenLength() {
+ return tokenLength;
+ }
+
+ @Override
+ public int getTokenStartOff() {
+ return tokenStart;
+ }
+
+ @Override
+ public boolean hasNext() {
+ if (pos >= start + length)
+ return false;
+ else
+ return true;
+ }
+
+ @Override
+ public void next() {
+ tokenLength = 0;
+ tokenStart = pos;
+ while (pos < start + length) {
+ int len = StringUtils.charSize(data, pos);
+ char ch = StringUtils.charAt(data, pos);
+ pos += len;
+ if (ch == delimiter) {
+ break;
+ }
+ tokenLength += len;
+ }
+ }
+
+ @Override
+ public void reset(byte[] data, int start, int length) {
+ this.data = data;
+ this.start = start;
+ this.pos = start;
+ this.length = length;
+ this.tokenLength = 0;
+ this.tokenStart = 0;
pos += 2; // UTF-8 specific
- }
-
- @Override
- public void writeToken(DataOutput dos) throws IOException {
- // WARNING: 2-byte length indicator is specific to UTF-8
- dos.writeShort((short)tokenLength);
- dos.write(data, tokenStart, tokenLength);
- }
+ }
- @Override
- public RecordDescriptor getTokenSchema() {
- return tokenSchema;
- }
+ @Override
+ public void writeToken(DataOutput dos) throws IOException {
+ // WARNING: 2-byte length indicator is specific to UTF-8
+ dos.writeShort((short) tokenLength);
+ dos.write(data, tokenStart, tokenLength);
+ }
+
+ @Override
+ public RecordDescriptor getTokenSchema() {
+ return tokenSchema;
+ }
}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizerFactory.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizerFactory.java
index 6432c4a..e3e0be3 100644
--- a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizerFactory.java
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizerFactory.java
@@ -1,19 +1,34 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
import edu.uci.ics.hyracks.storage.am.invertedindex.api.IBinaryTokenizer;
import edu.uci.ics.hyracks.storage.am.invertedindex.api.IBinaryTokenizerFactory;
public class DelimitedUTF8StringBinaryTokenizerFactory implements IBinaryTokenizerFactory {
-
- private static final long serialVersionUID = 1L;
- private final char delimiter;
-
- public DelimitedUTF8StringBinaryTokenizerFactory(char delimiter) {
- this.delimiter = delimiter;
- }
-
- @Override
- public IBinaryTokenizer createBinaryTokenizer() {
- return new DelimitedUTF8StringBinaryTokenizer(delimiter);
- }
+
+ private static final long serialVersionUID = 1L;
+ private final char delimiter;
+
+ public DelimitedUTF8StringBinaryTokenizerFactory(char delimiter) {
+ this.delimiter = delimiter;
+ }
+
+ @Override
+ public IBinaryTokenizer createBinaryTokenizer() {
+ return new DelimitedUTF8StringBinaryTokenizer(delimiter);
+ }
}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedQGramUTF8StringBinaryTokenizer.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedQGramUTF8StringBinaryTokenizer.java
index 68fd502..54fc371 100644
--- a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedQGramUTF8StringBinaryTokenizer.java
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedQGramUTF8StringBinaryTokenizer.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
import java.io.DataOutput;
@@ -10,121 +25,121 @@
import edu.uci.ics.hyracks.storage.am.invertedindex.api.IBinaryTokenizer;
public class HashedQGramUTF8StringBinaryTokenizer implements IBinaryTokenizer {
-
- private static final RecordDescriptor tokenSchema =
- new RecordDescriptor(new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE } );
-
- private final boolean prePost;
- private final int q;
- private byte[] data;
- private int start;
- private int length;
- private int gramNum;
- private int utflen;
-
- private final char PRECHAR = '#';
- private final char POSTCHAR = '$';
-
- private int charPos;
- private int pos;
- private int hashedGram;
-
- HashedQGramUTF8StringBinaryTokenizer(int q, boolean prePost) {
- this.prePost = prePost;
- this.q = q;
- }
-
- @Override
- public int getTokenLength() {
- // the produced token (hashed q-gram) is derived from data
- // but not contained in it
- // therefore this call does not make sense
- return -1;
- }
- @Override
- public int getTokenStartOff() {
- // the produced token (hashed q-gram) is derived from data
- // but not contained in it
- // therefore this call does not make sense
- return -1;
- }
+ private static final RecordDescriptor tokenSchema = new RecordDescriptor(
+ new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
- @Override
- public boolean hasNext() {
- if((prePost && pos >= start + length) || (!prePost && pos >= start + length - q)) return false;
- else return true;
- }
+ private final boolean prePost;
+ private final int q;
+ private byte[] data;
+ private int start;
+ private int length;
+ private int gramNum;
+ private int utflen;
- @Override
- public void next() {
- hashedGram = 0;
- if(prePost) {
- if(gramNum < q) {
- for(int i = 0; i < q - gramNum; i++) {
- hashedGram = 31 * hashedGram + PRECHAR;
- }
-
- int tmpPos = pos;
- for(int i = 0; i < gramNum; i++) {
- hashedGram = 31 * hashedGram + StringUtils.charAt(data, tmpPos);
- tmpPos += StringUtils.charSize(data, tmpPos);
- }
- }
- else {
- int stopStr = Math.min(charPos + q, utflen);
- int tmpPos = pos;
- for(int i = charPos; i < stopStr; i++) {
- hashedGram = 31 * hashedGram + StringUtils.charAt(data, tmpPos);
- tmpPos += StringUtils.charSize(data, tmpPos);
- }
-
- int stopPost = (charPos + q) - (utflen);
- for(int i = 0; i < stopPost; i++) {
- hashedGram = 31 * hashedGram + POSTCHAR;
- }
- pos += StringUtils.charSize(data, pos);
- charPos++;
- }
- gramNum++;
- }
- else {
- int tmpPos = pos;
- for(int i = charPos; i < charPos + q; i++) {
- hashedGram = 31 * hashedGram + StringUtils.charAt(data, tmpPos);
- tmpPos += StringUtils.charSize(data, tmpPos);
- }
- pos += StringUtils.charSize(data, pos);
- charPos++;
- }
- }
+ private final char PRECHAR = '#';
+ private final char POSTCHAR = '$';
- @Override
- public void reset(byte[] data, int start, int length) {
- this.data = data;
- this.start = start;
- this.length = length;
- this.utflen = StringUtils.getUTFLen(data, start);
- this.pos = start + 2; // UTF-8 specific
- this.gramNum = 1;
- this.charPos = 0;
- }
-
- @Override
- public void writeToken(DataOutput dos) throws IOException {
- dos.writeInt(hashedGram);
- }
-
- public char getPreChar() {
- return PRECHAR;
- }
-
- public char getPostChar() {
- return POSTCHAR;
- }
+ private int charPos;
+ private int pos;
+ private int hashedGram;
- @Override
- public RecordDescriptor getTokenSchema() {
- return tokenSchema;
- }
+ HashedQGramUTF8StringBinaryTokenizer(int q, boolean prePost) {
+ this.prePost = prePost;
+ this.q = q;
+ }
+
+ @Override
+ public int getTokenLength() {
+ // the produced token (hashed q-gram) is derived from data
+ // but not contained in it
+ // therefore this call does not make sense
+ return -1;
+ }
+
+ @Override
+ public int getTokenStartOff() {
+ // the produced token (hashed q-gram) is derived from data
+ // but not contained in it
+ // therefore this call does not make sense
+ return -1;
+ }
+
+ @Override
+ public boolean hasNext() {
+ if ((prePost && pos >= start + length) || (!prePost && pos >= start + length - q))
+ return false;
+ else
+ return true;
+ }
+
+ @Override
+ public void next() {
+ hashedGram = 0;
+ if (prePost) {
+ if (gramNum < q) {
+ for (int i = 0; i < q - gramNum; i++) {
+ hashedGram = 31 * hashedGram + PRECHAR;
+ }
+
+ int tmpPos = pos;
+ for (int i = 0; i < gramNum; i++) {
+ hashedGram = 31 * hashedGram + StringUtils.charAt(data, tmpPos);
+ tmpPos += StringUtils.charSize(data, tmpPos);
+ }
+ } else {
+ int stopStr = Math.min(charPos + q, utflen);
+ int tmpPos = pos;
+ for (int i = charPos; i < stopStr; i++) {
+ hashedGram = 31 * hashedGram + StringUtils.charAt(data, tmpPos);
+ tmpPos += StringUtils.charSize(data, tmpPos);
+ }
+
+ int stopPost = (charPos + q) - (utflen);
+ for (int i = 0; i < stopPost; i++) {
+ hashedGram = 31 * hashedGram + POSTCHAR;
+ }
+ pos += StringUtils.charSize(data, pos);
+ charPos++;
+ }
+ gramNum++;
+ } else {
+ int tmpPos = pos;
+ for (int i = charPos; i < charPos + q; i++) {
+ hashedGram = 31 * hashedGram + StringUtils.charAt(data, tmpPos);
+ tmpPos += StringUtils.charSize(data, tmpPos);
+ }
+ pos += StringUtils.charSize(data, pos);
+ charPos++;
+ }
+ }
+
+ @Override
+ public void reset(byte[] data, int start, int length) {
+ this.data = data;
+ this.start = start;
+ this.length = length;
+ this.utflen = StringUtils.getUTFLen(data, start);
+ this.pos = start + 2; // UTF-8 specific
+ this.gramNum = 1;
+ this.charPos = 0;
+ }
+
+ @Override
+ public void writeToken(DataOutput dos) throws IOException {
+ dos.writeInt(hashedGram);
+ }
+
+ public char getPreChar() {
+ return PRECHAR;
+ }
+
+ public char getPostChar() {
+ return POSTCHAR;
+ }
+
+ @Override
+ public RecordDescriptor getTokenSchema() {
+ return tokenSchema;
+ }
}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedQGramUTF8StringBinaryTokenizerFactory.java b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedQGramUTF8StringBinaryTokenizerFactory.java
index 98fd9ad..a11fe8a 100644
--- a/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedQGramUTF8StringBinaryTokenizerFactory.java
+++ b/hyracks/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedQGramUTF8StringBinaryTokenizerFactory.java
@@ -1,21 +1,36 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
import edu.uci.ics.hyracks.storage.am.invertedindex.api.IBinaryTokenizer;
import edu.uci.ics.hyracks.storage.am.invertedindex.api.IBinaryTokenizerFactory;
public class HashedQGramUTF8StringBinaryTokenizerFactory implements IBinaryTokenizerFactory {
-
- private static final long serialVersionUID = 1L;
- private final int q;
- private final boolean prePost;
-
- public HashedQGramUTF8StringBinaryTokenizerFactory(int q, boolean prePost) {
- this.q = q;
- this.prePost = prePost;
- }
-
- @Override
- public IBinaryTokenizer createBinaryTokenizer() {
- return new HashedQGramUTF8StringBinaryTokenizer(q, prePost);
- }
+
+ private static final long serialVersionUID = 1L;
+ private final int q;
+ private final boolean prePost;
+
+ public HashedQGramUTF8StringBinaryTokenizerFactory(int q, boolean prePost) {
+ this.q = q;
+ this.prePost = prePost;
+ }
+
+ @Override
+ public IBinaryTokenizer createBinaryTokenizer() {
+ return new HashedQGramUTF8StringBinaryTokenizer(q, prePost);
+ }
}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchers/SimpleConjunctiveSearcherTest.java b/hyracks/hyracks-storage-am-invertedindex/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchers/SimpleConjunctiveSearcherTest.java
index 3a5c903..72785dd 100644
--- a/hyracks/hyracks-storage-am-invertedindex/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchers/SimpleConjunctiveSearcherTest.java
+++ b/hyracks/hyracks-storage-am-invertedindex/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchers/SimpleConjunctiveSearcherTest.java
@@ -1,11 +1,24 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.invertedindex.searchers;
import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
-import java.io.File;
-import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
@@ -29,6 +42,7 @@
import edu.uci.ics.hyracks.dataflow.common.data.comparators.UTF8StringBinaryComparatorFactory;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.api.DummySMI;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
@@ -47,29 +61,25 @@
import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexResultCursor;
import edu.uci.ics.hyracks.storage.am.invertedindex.impls.SimpleConjunctiveSearcher;
import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.DelimitedUTF8StringBinaryTokenizer;
-import edu.uci.ics.hyracks.storage.common.buffercache.BufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ClockPageReplacementStrategy;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
-import edu.uci.ics.hyracks.storage.common.buffercache.IPageReplacementStrategy;
-import edu.uci.ics.hyracks.storage.common.file.FileInfo;
-import edu.uci.ics.hyracks.storage.common.file.FileManager;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
public class SimpleConjunctiveSearcherTest {
-
- // testing params
- //private static final int PAGE_SIZE = 256;
- //private static final int NUM_PAGES = 10;
- //private static final int HYRACKS_FRAME_SIZE = 256;
- // realistic params
- //private static final int PAGE_SIZE = 65536;
- private static final int PAGE_SIZE = 32768;
+ // testing params
+ // private static final int PAGE_SIZE = 256;
+ // private static final int NUM_PAGES = 10;
+ // private static final int HYRACKS_FRAME_SIZE = 256;
+
+ // realistic params
+ // private static final int PAGE_SIZE = 65536;
+ private static final int PAGE_SIZE = 32768;
private static final int NUM_PAGES = 10;
private static final int HYRACKS_FRAME_SIZE = 32768;
-
- private String tmpDir = System.getProperty("java.io.tmpdir");
-
+
+ private String tmpDir = System.getProperty("java.io.tmpdir");
+
public class BufferAllocator implements ICacheMemoryAllocator {
@Override
public ByteBuffer[] allocate(int pageSize, int numPages) {
@@ -80,194 +90,190 @@
return buffers;
}
}
-
- @Test
- public void test01() throws Exception {
-
- FileManager fileManager = new FileManager();
- ICacheMemoryAllocator allocator = new BufferAllocator();
- IPageReplacementStrategy prs = new ClockPageReplacementStrategy();
- IBufferCache bufferCache = new BufferCache(allocator, prs, fileManager, PAGE_SIZE, NUM_PAGES);
- File f = new File(tmpDir + "/" + "btreetest.bin");
- RandomAccessFile raf = new RandomAccessFile(f, "rw");
- int fileId = 0;
- FileInfo fi = new FileInfo(fileId, raf);
- fileManager.registerFile(fi);
-
- // declare fields
- int fieldCount = 2;
- ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ @Test
+ public void test01() throws Exception {
+
+ DummySMI smi = new DummySMI(PAGE_SIZE, NUM_PAGES);
+ IBufferCache bufferCache = smi.getBufferCache();
+ IFileMapProvider fmp = smi.getFileMapProvider();
+ String fileName = tmpDir + "/" + "btreetest.bin";
+ bufferCache.createFile(fileName);
+ int fileId = fmp.lookupFileId(fileName);
+ bufferCache.openFile(fileId);
+
+ // declare fields
+ int fieldCount = 2;
+ ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
typeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
typeTraits[1] = new TypeTrait(4);
-
+
// declare keys
- int keyFieldCount = 2;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
- cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-
- MultiComparator cmp = new MultiComparator(typeTraits, cmps);
-
- TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
- //SimpleTupleWriterFactory tupleWriterFactory = new SimpleTupleWriterFactory();
+ int keyFieldCount = 2;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+
+ MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+
+ TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+ // SimpleTupleWriterFactory tupleWriterFactory = new
+ // SimpleTupleWriterFactory();
IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
- //IBTreeLeafFrameFactory leafFrameFactory = new FieldPrefixNSMLeafFrameFactory(tupleWriterFactory);
+ // IBTreeLeafFrameFactory leafFrameFactory = new
+ // FieldPrefixNSMLeafFrameFactory(tupleWriterFactory);
IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
IBTreeMetaDataFrameFactory metaFrameFactory = new MetaDataFrameFactory();
-
+
IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
IBTreeInteriorFrame interiorFrame = interiorFrameFactory.getFrame();
- IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
-
- BTree btree = new BTree(bufferCache, interiorFrameFactory, leafFrameFactory, cmp);
- btree.create(fileId, leafFrame, metaFrame);
- btree.open(fileId);
+ IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
- Random rnd = new Random();
- rnd.setSeed(50);
+ BTree btree = new BTree(bufferCache, interiorFrameFactory, leafFrameFactory, cmp);
+ btree.create(fileId, leafFrame, metaFrame);
+ btree.open(fileId);
- IHyracksContext ctx = new RootHyracksContext(HYRACKS_FRAME_SIZE);
+ Random rnd = new Random();
+ rnd.setSeed(50);
+
+ IHyracksContext ctx = new RootHyracksContext(HYRACKS_FRAME_SIZE);
ByteBuffer frame = ctx.getResourceManager().allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx);
- ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
- DataOutput dos = tb.getDataOutput();
-
- ISerializerDeserializer[] btreeSerde = { UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
- RecordDescriptor btreeRecDesc = new RecordDescriptor(btreeSerde);
- IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx, btreeRecDesc);
- accessor.reset(frame);
- FrameTupleReference tuple = new FrameTupleReference();
-
- List<String> tokens = new ArrayList<String>();
- tokens.add("computer");
- tokens.add("hyracks");
- tokens.add("fast");
- tokens.add("university");
- tokens.add("science");
- tokens.add("major");
-
- int maxId = 1000000;
- int addProb = 0;
- int addProbStep = 2;
-
- BTreeOpContext opCtx = btree.createOpContext(BTreeOp.BTO_INSERT, leafFrame, interiorFrame, metaFrame);
-
- for (int i = 0; i < tokens.size(); i++) {
-
- addProb += addProbStep;
- for(int j = 0; j < maxId; j++) {
- if((Math.abs(rnd.nextInt()) % addProb) == 0) {
- tb.reset();
- UTF8StringSerializerDeserializer.INSTANCE.serialize(tokens.get(i), dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(j, dos);
- tb.addFieldEndOffset();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx);
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+ DataOutput dos = tb.getDataOutput();
- appender.reset(frame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+ ISerializerDeserializer[] btreeSerde = { UTF8StringSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor btreeRecDesc = new RecordDescriptor(btreeSerde);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx, btreeRecDesc);
+ accessor.reset(frame);
+ FrameTupleReference tuple = new FrameTupleReference();
- tuple.reset(accessor, 0);
+ List<String> tokens = new ArrayList<String>();
+ tokens.add("computer");
+ tokens.add("hyracks");
+ tokens.add("fast");
+ tokens.add("university");
+ tokens.add("science");
+ tokens.add("major");
- try {
- btree.insert(tuple, opCtx);
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
- }
- }
-
- int numPages = btree.getMaxPage(metaFrame);
- System.out.println("NUMPAGES: " + numPages);
-
- // build query as tuple reference
- ISerializerDeserializer[] querySerde = { UTF8StringSerializerDeserializer.INSTANCE };
- RecordDescriptor queryRecDesc = new RecordDescriptor(querySerde);
-
- FrameTupleAppender queryAppender = new FrameTupleAppender(ctx);
- ArrayTupleBuilder queryTb = new ArrayTupleBuilder(querySerde.length);
- DataOutput queryDos = queryTb.getDataOutput();
-
- IFrameTupleAccessor queryAccessor = new FrameTupleAccessor(ctx, queryRecDesc);
- queryAccessor.reset(frame);
- FrameTupleReference queryTuple = new FrameTupleReference();
-
- String query = "computer hyracks fast";
- char queryDelimiter = ' ';
- IBinaryTokenizer queryTokenizer = new DelimitedUTF8StringBinaryTokenizer(queryDelimiter);
-
- queryTb.reset();
- UTF8StringSerializerDeserializer.INSTANCE.serialize(query, queryDos);
- queryTb.addFieldEndOffset();
-
- queryAppender.reset(frame, true);
- queryAppender.append(queryTb.getFieldEndOffsets(), queryTb.getByteArray(), 0, queryTb.getSize());
- queryTuple.reset(queryAccessor, 0);
-
- int numKeyFields = 1;
- int numValueFields = 1;
- ISerializerDeserializer[] resultSerde = new ISerializerDeserializer[numValueFields];
- for(int i = 0; i < numValueFields; i++) {
- resultSerde[i] = btreeSerde[numKeyFields + i];
- }
- RecordDescriptor resultRecDesc = new RecordDescriptor(resultSerde);
- FrameTupleAccessor resultAccessor = new FrameTupleAccessor(ctx, resultRecDesc);
- FrameTupleReference resultTuple = new FrameTupleReference();
-
- SimpleConjunctiveSearcher searcher = new SimpleConjunctiveSearcher(ctx, btree, btreeRecDesc, queryTokenizer, numKeyFields, numValueFields);
-
- long timeStart = System.currentTimeMillis();
- searcher.search(queryTuple, 0);
- long timeEnd = System.currentTimeMillis();
- System.out.println("SEARCH TIME: " + (timeEnd - timeStart) + "ms");
-
- //System.out.println("INTERSECTION RESULTS");
- IInvertedIndexResultCursor resultCursor = searcher.getResultCursor();
- while(resultCursor.hasNext()) {
- resultCursor.next();
- resultAccessor.reset(resultCursor.getBuffer());
- for(int i = 0; i < resultAccessor.getTupleCount(); i++) {
- resultTuple.reset(resultAccessor, i);
- for(int j = 0; j < resultTuple.getFieldCount(); j++) {
- ByteArrayInputStream inStream = new ByteArrayInputStream(resultTuple.getFieldData(j), resultTuple.getFieldStart(j), resultTuple.getFieldLength(j));
- DataInput dataIn = new DataInputStream(inStream);
- Object o = resultSerde[j].deserialize(dataIn);
- //System.out.print(o + " ");
- }
- //System.out.println();
- }
- }
-
- /*
- IBinaryComparator[] searchCmps = new IBinaryComparator[1];
- searchCmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
- MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
-
- // ordered scan
- IBTreeCursor scanCursor = new RangeSearchCursor(leafFrame);
- RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null);
- BTreeOpContext searchOpCtx = btree.createOpContext(BTreeOp.BTO_SEARCH, leafFrame, interiorFrame, metaFrame);
- btree.search(scanCursor, nullPred, searchOpCtx);
-
- try {
- while (scanCursor.hasNext()) {
- scanCursor.next();
- ITupleReference frameTuple = scanCursor.getTuple();
- String rec = cmp.printTuple(frameTuple, btreeSerde);
- System.out.println(rec);
+ int maxId = 10000;
+ int addProb = 0;
+ int addProbStep = 2;
+
+ BTreeOpContext opCtx = btree.createOpContext(BTreeOp.BTO_INSERT, leafFrame, interiorFrame, metaFrame);
+
+ for (int i = 0; i < tokens.size(); i++) {
+
+ addProb += addProbStep;
+ for (int j = 0; j < maxId; j++) {
+ if ((Math.abs(rnd.nextInt()) % addProb) == 0) {
+ tb.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(tokens.get(i), dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(j, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ try {
+ btree.insert(tuple, opCtx);
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
}
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- scanCursor.close();
}
- */
-
-
- btree.close();
+ int numPages = btree.getMaxPage(metaFrame);
+ System.out.println("NUMPAGES: " + numPages);
+
+ // build query as tuple reference
+ ISerializerDeserializer[] querySerde = { UTF8StringSerializerDeserializer.INSTANCE };
+ RecordDescriptor queryRecDesc = new RecordDescriptor(querySerde);
+
+ FrameTupleAppender queryAppender = new FrameTupleAppender(ctx);
+ ArrayTupleBuilder queryTb = new ArrayTupleBuilder(querySerde.length);
+ DataOutput queryDos = queryTb.getDataOutput();
+
+ IFrameTupleAccessor queryAccessor = new FrameTupleAccessor(ctx, queryRecDesc);
+ queryAccessor.reset(frame);
+ FrameTupleReference queryTuple = new FrameTupleReference();
+
+ String query = "computer hyracks fast";
+ char queryDelimiter = ' ';
+ IBinaryTokenizer queryTokenizer = new DelimitedUTF8StringBinaryTokenizer(queryDelimiter);
+
+ queryTb.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(query, queryDos);
+ queryTb.addFieldEndOffset();
+
+ queryAppender.reset(frame, true);
+ queryAppender.append(queryTb.getFieldEndOffsets(), queryTb.getByteArray(), 0, queryTb.getSize());
+ queryTuple.reset(queryAccessor, 0);
+
+ int numKeyFields = 1;
+ int numValueFields = 1;
+ ISerializerDeserializer[] resultSerde = new ISerializerDeserializer[numValueFields];
+ for (int i = 0; i < numValueFields; i++) {
+ resultSerde[i] = btreeSerde[numKeyFields + i];
+ }
+ RecordDescriptor resultRecDesc = new RecordDescriptor(resultSerde);
+ FrameTupleAccessor resultAccessor = new FrameTupleAccessor(ctx, resultRecDesc);
+ FrameTupleReference resultTuple = new FrameTupleReference();
+
+ SimpleConjunctiveSearcher searcher = new SimpleConjunctiveSearcher(ctx, btree, btreeRecDesc, queryTokenizer,
+ numKeyFields, numValueFields);
+
+ long timeStart = System.currentTimeMillis();
+ searcher.search(queryTuple, 0);
+ long timeEnd = System.currentTimeMillis();
+ System.out.println("SEARCH TIME: " + (timeEnd - timeStart) + "ms");
+
+ // System.out.println("INTERSECTION RESULTS");
+ IInvertedIndexResultCursor resultCursor = searcher.getResultCursor();
+ while (resultCursor.hasNext()) {
+ resultCursor.next();
+ resultAccessor.reset(resultCursor.getBuffer());
+ for (int i = 0; i < resultAccessor.getTupleCount(); i++) {
+ resultTuple.reset(resultAccessor, i);
+ for (int j = 0; j < resultTuple.getFieldCount(); j++) {
+ ByteArrayInputStream inStream = new ByteArrayInputStream(resultTuple.getFieldData(j), resultTuple
+ .getFieldStart(j), resultTuple.getFieldLength(j));
+ DataInput dataIn = new DataInputStream(inStream);
+ Object o = resultSerde[j].deserialize(dataIn);
+ System.out.print(o + " ");
+ }
+ System.out.println();
+ }
+ }
+
+ /*
+ * IBinaryComparator[] searchCmps = new IBinaryComparator[1];
+ * searchCmps[0] =
+ * UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ * MultiComparator searchCmp = new MultiComparator(typeTraits,
+ * searchCmps);
+ *
+ * // ordered scan IBTreeCursor scanCursor = new
+ * RangeSearchCursor(leafFrame); RangePredicate nullPred = new
+ * RangePredicate(true, null, null, true, true, null); BTreeOpContext
+ * searchOpCtx = btree.createOpContext(BTreeOp.BTO_SEARCH, leafFrame,
+ * interiorFrame, metaFrame); btree.search(scanCursor, nullPred,
+ * searchOpCtx);
+ *
+ * try { while (scanCursor.hasNext()) { scanCursor.next();
+ * ITupleReference frameTuple = scanCursor.getTuple(); String rec =
+ * cmp.printTuple(frameTuple, btreeSerde); System.out.println(rec); } }
+ * catch (Exception e) { e.printStackTrace(); } finally {
+ * scanCursor.close(); }
+ */
+
+ btree.close();
+ bufferCache.closeFile(fileId);
bufferCache.close();
- fileManager.close();
- }
+ }
}
diff --git a/hyracks/hyracks-storage-am-invertedindex/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/TokenizerTest.java b/hyracks/hyracks-storage-am-invertedindex/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/TokenizerTest.java
index 1c1f1f9..47c75cf 100644
--- a/hyracks/hyracks-storage-am-invertedindex/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/TokenizerTest.java
+++ b/hyracks/hyracks-storage-am-invertedindex/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/TokenizerTest.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
import java.io.ByteArrayInputStream;
@@ -18,156 +33,162 @@
public class TokenizerTest {
- // testing DelimitedUTF8StringBinaryTokenizer
- @Test
- public void test01() throws Exception {
- Random rnd = new Random(50);
-
- int numDocs = 100;
- int maxWords = 1000;
- int maxWordLength = 50;
- char delimiter = ' ';
-
- DelimitedUTF8StringBinaryTokenizer tok = new DelimitedUTF8StringBinaryTokenizer(delimiter);
-
- // create a bunch of documents
- for(int i = 0; i < numDocs; i++) {
-
- // create a single document with a bunch of words
- int words = (Math.abs(rnd.nextInt()) % maxWords) + 1;
- StringBuilder strBuilder = new StringBuilder();
- for(int j = 0; j < words; j++) {
- int len = (Math.abs(rnd.nextInt()) % maxWordLength) + 1;
- String s = randomString(len, rnd);
- strBuilder.append(s);
- if(j < words-1) strBuilder.append(delimiter);
- }
-
- String doc = strBuilder.toString();
-
- // serialize document into baaos
- ByteArrayAccessibleOutputStream baaos = new ByteArrayAccessibleOutputStream();
- DataOutputStream dos = new DataOutputStream(baaos);
- UTF8StringSerializerDeserializer.INSTANCE.serialize(doc, dos);
- byte[] data = baaos.toByteArray();
-
- // use binary tokenizer and compare with Java tokenizer
- String[] cmpTokens = doc.split(new String(new char[] { delimiter }));
- int cmpCounter = 0;
-
- tok.reset(data, 0, data.length);
- while(tok.hasNext()) {
- tok.next();
-
- // write token to outputstream
- ByteArrayAccessibleOutputStream baaosWrite = new ByteArrayAccessibleOutputStream();
- DataOutputStream dosWrite = new DataOutputStream(baaosWrite);
- tok.writeToken(dosWrite);
+ // testing DelimitedUTF8StringBinaryTokenizer
+ @Test
+ public void test01() throws Exception {
+ Random rnd = new Random(50);
- // deserialize token to get string object
- ByteArrayInputStream inStream = new ByteArrayInputStream(baaosWrite.toByteArray());
- DataInput dataIn = new DataInputStream(inStream);
- String s = UTF8StringSerializerDeserializer.INSTANCE.deserialize(dataIn);
-
- Assert.assertEquals(s, cmpTokens[cmpCounter++]);
- }
- }
- }
-
- // testing HashedQGramUTF8StringBinaryTokenizer
- @Test
- public void test02() throws Exception {
- Random rnd = new Random(50);
-
- int numStrings = 1000;
- int maxStrLen = 100;
- int minQ = 2;
- int maxQ = 10;
-
- // we test the correctness of HashedQGramUTF8StringBinaryTokenizer as follows:
- // 1.1. tokenize the string into q-gram strings
- // 1.2. serialize q-gram strings into bytes
- // 1.3. compute hashed gram with UTF8StringBinaryHashFunctionFactory
- // 2.1. serialize string into bytes
- // 2.2. tokenize serialized string into hashed q-grams
- // 2.3. test whether hashed grams from 1.3. and 2.3. are equal
- for(int i = 0; i < numStrings; i++) {
- int q = (Math.abs(rnd.nextInt()) % (maxQ - minQ)) + minQ;
- int strLen = (Math.abs(rnd.nextInt()) % (maxStrLen - q)) + q;
- String str = randomString(strLen, rnd);
-
- // randomly choose pre and postfixing
- boolean prePost = false;
- if(Math.abs(rnd.nextInt()) % 2 == 0) prePost = true;
-
- HashedQGramUTF8StringBinaryTokenizer qgramTok = new HashedQGramUTF8StringBinaryTokenizer(q, prePost);
-
- String extendedString = str;
- if(prePost) {
- // pre and postfix string
- StringBuilder strBuilder = new StringBuilder();
- for(int j = 0; j < q - 1; j++) strBuilder.append(qgramTok.getPreChar());
- strBuilder.append(str);
- for(int j = 0; j < q - 1; j++) strBuilder.append(qgramTok.getPostChar());
- extendedString = strBuilder.toString();
- }
-
- // generate q-grams in deserialized form
- ArrayList<String> javaGrams = new ArrayList<String>();
- for(int j = 0; j < extendedString.length() - q + 1; j++) {
- javaGrams.add(extendedString.substring(j, j + q));
- }
-
- // serialize string for use in binary gram tokenizer
- ByteArrayAccessibleOutputStream baaos = new ByteArrayAccessibleOutputStream();
- DataOutputStream dos = new DataOutputStream(baaos);
- UTF8StringSerializerDeserializer.INSTANCE.serialize(str, dos);
- byte[] data = baaos.toByteArray();
-
- qgramTok.reset(data, 0, data.length);
-
- int counter = 0;
- while(qgramTok.hasNext()) {
- qgramTok.next();
-
- // write token to outputstream
- ByteArrayAccessibleOutputStream baaosWrite = new ByteArrayAccessibleOutputStream();
- DataOutputStream dosWrite = new DataOutputStream(baaosWrite);
- qgramTok.writeToken(dosWrite);
+ int numDocs = 100;
+ int maxWords = 1000;
+ int maxWordLength = 50;
+ char delimiter = ' ';
- // deserialize token to get hashed gram
- ByteArrayInputStream inStream = new ByteArrayInputStream(baaosWrite.toByteArray());
- DataInput dataIn = new DataInputStream(inStream);
- Integer binHashedGram = IntegerSerializerDeserializer.INSTANCE.deserialize(dataIn);
-
- // create hashed gram to test against
- ByteArrayAccessibleOutputStream baaosCmp = new ByteArrayAccessibleOutputStream();
- DataOutputStream dosCmp = new DataOutputStream(baaosCmp);
- UTF8StringSerializerDeserializer.INSTANCE.serialize(javaGrams.get(counter), dosCmp);
-
- IBinaryHashFunction strHasher = UTF8StringBinaryHashFunctionFactory.INSTANCE.createBinaryHashFunction();
- byte[] cmpData = baaosCmp.toByteArray();
- int cmpHash = strHasher.hash(cmpData, 0, cmpData.length);
-
- Assert.assertEquals(binHashedGram.intValue(), cmpHash);
-
- counter++;
- }
- }
- }
-
- public static String randomString(int length, Random random) {
+ DelimitedUTF8StringBinaryTokenizer tok = new DelimitedUTF8StringBinaryTokenizer(delimiter);
+
+ // create a bunch of documents
+ for (int i = 0; i < numDocs; i++) {
+
+ // create a single document with a bunch of words
+ int words = (Math.abs(rnd.nextInt()) % maxWords) + 1;
+ StringBuilder strBuilder = new StringBuilder();
+ for (int j = 0; j < words; j++) {
+ int len = (Math.abs(rnd.nextInt()) % maxWordLength) + 1;
+ String s = randomString(len, rnd);
+ strBuilder.append(s);
+ if (j < words - 1)
+ strBuilder.append(delimiter);
+ }
+
+ String doc = strBuilder.toString();
+
+ // serialize document into baaos
+ ByteArrayAccessibleOutputStream baaos = new ByteArrayAccessibleOutputStream();
+ DataOutputStream dos = new DataOutputStream(baaos);
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(doc, dos);
+ byte[] data = baaos.toByteArray();
+
+ // use binary tokenizer and compare with Java tokenizer
+ String[] cmpTokens = doc.split(new String(new char[] { delimiter }));
+ int cmpCounter = 0;
+
+ tok.reset(data, 0, data.length);
+ while (tok.hasNext()) {
+ tok.next();
+
+ // write token to outputstream
+ ByteArrayAccessibleOutputStream baaosWrite = new ByteArrayAccessibleOutputStream();
+ DataOutputStream dosWrite = new DataOutputStream(baaosWrite);
+ tok.writeToken(dosWrite);
+
+ // deserialize token to get string object
+ ByteArrayInputStream inStream = new ByteArrayInputStream(baaosWrite.toByteArray());
+ DataInput dataIn = new DataInputStream(inStream);
+ String s = UTF8StringSerializerDeserializer.INSTANCE.deserialize(dataIn);
+
+ Assert.assertEquals(s, cmpTokens[cmpCounter++]);
+ }
+ }
+ }
+
+ // testing HashedQGramUTF8StringBinaryTokenizer
+ @Test
+ public void test02() throws Exception {
+ Random rnd = new Random(50);
+
+ int numStrings = 1000;
+ int maxStrLen = 100;
+ int minQ = 2;
+ int maxQ = 10;
+
+ // we test the correctness of HashedQGramUTF8StringBinaryTokenizer as
+ // follows:
+ // 1.1. tokenize the string into q-gram strings
+ // 1.2. serialize q-gram strings into bytes
+ // 1.3. compute hashed gram with UTF8StringBinaryHashFunctionFactory
+ // 2.1. serialize string into bytes
+ // 2.2. tokenize serialized string into hashed q-grams
+ // 2.3. test whether hashed grams from 1.3. and 2.3. are equal
+ for (int i = 0; i < numStrings; i++) {
+ int q = (Math.abs(rnd.nextInt()) % (maxQ - minQ)) + minQ;
+ int strLen = (Math.abs(rnd.nextInt()) % (maxStrLen - q)) + q;
+ String str = randomString(strLen, rnd);
+
+ // randomly choose pre and postfixing
+ boolean prePost = false;
+ if (Math.abs(rnd.nextInt()) % 2 == 0)
+ prePost = true;
+
+ HashedQGramUTF8StringBinaryTokenizer qgramTok = new HashedQGramUTF8StringBinaryTokenizer(q, prePost);
+
+ String extendedString = str;
+ if (prePost) {
+ // pre and postfix string
+ StringBuilder strBuilder = new StringBuilder();
+ for (int j = 0; j < q - 1; j++)
+ strBuilder.append(qgramTok.getPreChar());
+ strBuilder.append(str);
+ for (int j = 0; j < q - 1; j++)
+ strBuilder.append(qgramTok.getPostChar());
+ extendedString = strBuilder.toString();
+ }
+
+ // generate q-grams in deserialized form
+ ArrayList<String> javaGrams = new ArrayList<String>();
+ for (int j = 0; j < extendedString.length() - q + 1; j++) {
+ javaGrams.add(extendedString.substring(j, j + q));
+ }
+
+ // serialize string for use in binary gram tokenizer
+ ByteArrayAccessibleOutputStream baaos = new ByteArrayAccessibleOutputStream();
+ DataOutputStream dos = new DataOutputStream(baaos);
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(str, dos);
+ byte[] data = baaos.toByteArray();
+
+ qgramTok.reset(data, 0, data.length);
+
+ int counter = 0;
+ while (qgramTok.hasNext()) {
+ qgramTok.next();
+
+ // write token to outputstream
+ ByteArrayAccessibleOutputStream baaosWrite = new ByteArrayAccessibleOutputStream();
+ DataOutputStream dosWrite = new DataOutputStream(baaosWrite);
+ qgramTok.writeToken(dosWrite);
+
+ // deserialize token to get hashed gram
+ ByteArrayInputStream inStream = new ByteArrayInputStream(baaosWrite.toByteArray());
+ DataInput dataIn = new DataInputStream(inStream);
+ Integer binHashedGram = IntegerSerializerDeserializer.INSTANCE.deserialize(dataIn);
+
+ // create hashed gram to test against
+ ByteArrayAccessibleOutputStream baaosCmp = new ByteArrayAccessibleOutputStream();
+ DataOutputStream dosCmp = new DataOutputStream(baaosCmp);
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(javaGrams.get(counter), dosCmp);
+
+ IBinaryHashFunction strHasher = UTF8StringBinaryHashFunctionFactory.INSTANCE.createBinaryHashFunction();
+ byte[] cmpData = baaosCmp.toByteArray();
+ int cmpHash = strHasher.hash(cmpData, 0, cmpData.length);
+
+ Assert.assertEquals(binHashedGram.intValue(), cmpHash);
+
+ counter++;
+ }
+ }
+ }
+
+ public static String randomString(int length, Random random) {
int maxAttempts = 1000;
int count = 0;
- while(count < maxAttempts) {
- String s = Long.toHexString(Double.doubleToLongBits(random.nextDouble()));
- StringBuilder strBuilder = new StringBuilder();
- for (int i = 0; i < s.length() && i < length; i++) {
- strBuilder.append(s.charAt(Math.abs(random.nextInt()) % s.length()));
- }
- if(strBuilder.length() > 0) return strBuilder.toString();
- count++;
- }
+ while (count < maxAttempts) {
+ String s = Long.toHexString(Double.doubleToLongBits(random.nextDouble()));
+ StringBuilder strBuilder = new StringBuilder();
+ for (int i = 0; i < s.length() && i < length; i++) {
+ strBuilder.append(s.charAt(Math.abs(random.nextInt()) % s.length()));
+ }
+ if (strBuilder.length() > 0)
+ return strBuilder.toString();
+ count++;
+ }
return "abc";
}
}
diff --git a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/IBufferCacheProvider.java b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/IStorageManagerInterface.java
similarity index 72%
rename from hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/IBufferCacheProvider.java
rename to hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/IStorageManagerInterface.java
index c1a0eb4..ad6f743 100644
--- a/hyracks/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/IBufferCacheProvider.java
+++ b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/IStorageManagerInterface.java
@@ -12,15 +12,15 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-
-package edu.uci.ics.hyracks.storage.am.btree.dataflow;
+package edu.uci.ics.hyracks.storage.common;
import java.io.Serializable;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.FileManager;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-public interface IBufferCacheProvider extends Serializable {
- public IBufferCache getBufferCache();
- public FileManager getFileManager();
-}
+public interface IStorageManagerInterface extends Serializable {
+ public IBufferCache getBufferCache();
+
+ public IFileMapProvider getFileMapProvider();
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/BufferCache.java b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/BufferCache.java
index 3dcf8dc..7578f39 100644
--- a/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/BufferCache.java
+++ b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/BufferCache.java
@@ -14,20 +14,27 @@
*/
package edu.uci.ics.hyracks.storage.common.buffercache;
+import java.io.File;
import java.io.IOException;
+import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
+import java.util.logging.Level;
+import java.util.logging.Logger;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.common.file.FileInfo;
-import edu.uci.ics.hyracks.storage.common.file.FileManager;
+import edu.uci.ics.hyracks.storage.common.file.FileHandle;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapManager;
public class BufferCache implements IBufferCacheInternal {
+ private static final Logger LOGGER = Logger.getLogger(BufferCache.class.getName());
private static final int MAP_FACTOR = 2;
private static final int MAX_VICTIMIZATION_TRY_COUNT = 3;
@@ -37,13 +44,14 @@
private final CachedPage[] cachedPages;
private final CacheBucket[] pageMap;
private final IPageReplacementStrategy pageReplacementStrategy;
- private final FileManager fileManager;
+ private final IFileMapManager fileMapManager;
private final CleanerThread cleanerThread;
+ private final Map<Integer, FileHandle> fileInfoMap;
private boolean closed;
public BufferCache(ICacheMemoryAllocator allocator, IPageReplacementStrategy pageReplacementStrategy,
- FileManager fileManager, int pageSize, int numPages) {
+ IFileMapManager fileMapManager, int pageSize, int numPages) {
this.pageSize = pageSize;
this.numPages = numPages;
pageReplacementStrategy.setBufferCache(this);
@@ -57,7 +65,8 @@
pageMap[i] = new CacheBucket();
}
this.pageReplacementStrategy = pageReplacementStrategy;
- this.fileManager = fileManager;
+ this.fileMapManager = fileMapManager;
+ fileInfoMap = new HashMap<Integer, FileHandle>();
cleanerThread = new CleanerThread();
cleanerThread.start();
closed = false;
@@ -269,21 +278,31 @@
}
private void read(CachedPage cPage) throws HyracksDataException {
- FileInfo fInfo = fileManager.getFileInfo(FileInfo.getFileId(cPage.dpid));
+ FileHandle fInfo = getFileInfo(cPage);
try {
cPage.buffer.clear();
- fInfo.getFileChannel().read(cPage.buffer, (long) FileInfo.getPageId(cPage.dpid) * pageSize);
+ fInfo.getFileChannel().read(cPage.buffer, (long) FileHandle.getPageId(cPage.dpid) * pageSize);
} catch (IOException e) {
throw new HyracksDataException(e);
}
}
+ private FileHandle getFileInfo(CachedPage cPage) throws HyracksDataException {
+ synchronized (fileInfoMap) {
+ FileHandle fInfo = fileInfoMap.get(FileHandle.getFileId(cPage.dpid));
+ if (fInfo == null) {
+ throw new HyracksDataException("No such file mapped");
+ }
+ return fInfo;
+ }
+ }
+
private void write(CachedPage cPage) throws HyracksDataException {
- FileInfo fInfo = fileManager.getFileInfo(FileInfo.getFileId(cPage.dpid));
+ FileHandle fInfo = getFileInfo(cPage);
try {
cPage.buffer.position(0);
cPage.buffer.limit(pageSize);
- fInfo.getFileChannel().write(cPage.buffer, (long) FileInfo.getPageId(cPage.dpid) * pageSize);
+ fInfo.getFileChannel().write(cPage.buffer, (long) FileHandle.getPageId(cPage.dpid) * pageSize);
} catch (IOException e) {
throw new HyracksDataException(e);
}
@@ -339,6 +358,10 @@
pageReplacementStrategy.notifyCachePageReset(this);
}
+ public void invalidate() {
+ reset(-1);
+ }
+
@Override
public ByteBuffer getBuffer() {
return buffer;
@@ -467,4 +490,120 @@
}
}
}
+
+ @Override
+ public void createFile(String fileName) throws HyracksDataException {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Creating file: " + fileName + " in cache: " + this);
+ }
+ synchronized (fileInfoMap) {
+ fileMapManager.registerFile(fileName);
+ }
+ }
+
+ @Override
+ public void openFile(int fileId) throws HyracksDataException {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Opening file: " + fileId + " in cache: " + this);
+ }
+ synchronized (fileInfoMap) {
+ FileHandle fInfo;
+ fInfo = fileInfoMap.get(fileId);
+ if (fInfo == null) {
+ String fileName = fileMapManager.lookupFileName(fileId);
+ try {
+ File f = new File(fileName);
+ if (!f.exists()) {
+ File dir = new File(f.getParent());
+ dir.mkdirs();
+ }
+ fInfo = new FileHandle(fileId, new RandomAccessFile(f, "rw"));
+ } catch (IOException e) {
+ throw new HyracksDataException(e);
+ }
+ fileInfoMap.put(fileId, fInfo);
+ }
+ fInfo.incReferenceCount();
+ }
+ }
+
+ private void sweepAndFlush(int fileId) throws HyracksDataException {
+ for (int i = 0; i < pageMap.length; ++i) {
+ CacheBucket bucket = pageMap[i];
+ bucket.bucketLock.lock();
+ try {
+ CachedPage prev = bucket.cachedPage;
+ while (prev != null) {
+ CachedPage cPage = prev.next;
+ if (cPage == null) {
+ break;
+ }
+ if (invalidateIfFileIdMatch(fileId, cPage)) {
+ prev.next = cPage.next;
+ cPage.next = null;
+ } else {
+ prev = cPage;
+ }
+ }
+ // Take care of the head of the chain.
+ if (bucket.cachedPage != null) {
+ if (invalidateIfFileIdMatch(fileId, bucket.cachedPage)) {
+ CachedPage cPage = bucket.cachedPage;
+ bucket.cachedPage = bucket.cachedPage.next;
+ cPage.next = null;
+ }
+ }
+ } finally {
+ bucket.bucketLock.unlock();
+ }
+ }
+ }
+
+ private boolean invalidateIfFileIdMatch(int fileId, CachedPage cPage) throws HyracksDataException {
+ if (FileHandle.getFileId(cPage.dpid) == fileId) {
+ if (cPage.dirty.get()) {
+ write(cPage);
+ cPage.dirty.set(false);
+ cPage.pinCount.decrementAndGet();
+ }
+ if (cPage.pinCount.get() != 0) {
+ throw new IllegalStateException("Page is pinned and file is being closed");
+ }
+ cPage.invalidate();
+ return true;
+ }
+ return false;
+ }
+
+ @Override
+ public void closeFile(int fileId) throws HyracksDataException {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Closing file: " + fileId + " in cache: " + this);
+ }
+ synchronized (fileInfoMap) {
+ FileHandle fInfo = fileInfoMap.get(fileId);
+ if (fInfo == null) {
+ throw new HyracksDataException("Closing unopened file");
+ }
+ if (fInfo.decReferenceCount() <= 0) {
+ sweepAndFlush(fileId);
+ fileInfoMap.remove(fileId);
+ fInfo.close();
+ }
+ }
+ }
+
+ @Override
+ public synchronized void deleteFile(int fileId) throws HyracksDataException {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Deleting file: " + fileId + " in cache: " + this);
+ }
+ synchronized (fileInfoMap) {
+ FileHandle fInfo = fileInfoMap.get(fileId);
+ if (fInfo != null) {
+ throw new HyracksDataException("Deleting open file");
+ }
+ fileMapManager.unregisterFile(fileId);
+ }
+ }
}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/IBufferCache.java b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/IBufferCache.java
index 34bfc2f..2e7181e 100644
--- a/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/IBufferCache.java
+++ b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/IBufferCache.java
@@ -17,6 +17,14 @@
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
public interface IBufferCache {
+ public void createFile(String fileName) throws HyracksDataException;
+
+ public void openFile(int fileId) throws HyracksDataException;
+
+ public void closeFile(int fileId) throws HyracksDataException;
+
+ public void deleteFile(int fileId) throws HyracksDataException;
+
public ICachedPage pin(long dpid, boolean newPage) throws HyracksDataException;
public void unpin(ICachedPage page) throws HyracksDataException;
diff --git a/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/FileInfo.java b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/FileHandle.java
similarity index 69%
rename from hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/FileInfo.java
rename to hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/FileHandle.java
index 9b9270d..ddbedb4 100644
--- a/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/FileInfo.java
+++ b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/FileHandle.java
@@ -14,17 +14,23 @@
*/
package edu.uci.ics.hyracks.storage.common.file;
+import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.channels.FileChannel;
+import java.util.concurrent.atomic.AtomicInteger;
-public class FileInfo {
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public class FileHandle {
private final int fileId;
private final RandomAccessFile file;
+ private final AtomicInteger refCount;
private FileChannel channel;
- public FileInfo(int fileId, RandomAccessFile file) {
+ public FileHandle(int fileId, RandomAccessFile file) {
this.fileId = fileId;
this.file = file;
+ refCount = new AtomicInteger();
channel = file.getChannel();
}
@@ -40,6 +46,23 @@
return channel;
}
+ public void close() throws HyracksDataException {
+ try {
+ channel.close();
+ file.close();
+ } catch (IOException e) {
+ throw new HyracksDataException(e);
+ }
+ }
+
+ public int incReferenceCount() {
+ return refCount.incrementAndGet();
+ }
+
+ public int decReferenceCount() {
+ return refCount.decrementAndGet();
+ }
+
public long getDiskPageId(int pageId) {
return getDiskPageId(fileId, pageId);
}
diff --git a/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/FileManager.java b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/FileManager.java
deleted file mode 100644
index 86e236d..0000000
--- a/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/FileManager.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.common.file;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-
-public class FileManager {
- private final Map<Integer, FileInfo> fileRegistry;
-
- public FileManager() {
- fileRegistry = new HashMap<Integer, FileInfo>();
- }
-
- public void registerFile(FileInfo fInfo) throws HyracksDataException {
- if (fileRegistry.containsKey(fInfo.getFileId())) {
- throw new HyracksDataException("File " + fInfo.getFile().toString() + " with id " + fInfo.getFileId() + " is already registered");
- }
- fileRegistry.put(fInfo.getFileId(), fInfo);
- }
-
- public FileInfo unregisterFile(int fileId) throws HyracksDataException {
- if (!fileRegistry.containsKey(fileId)) {
- throw new HyracksDataException("File with id " + fileId + " not in registry");
- }
- return fileRegistry.remove(fileId);
- }
-
- public FileInfo getFileInfo(int fileId) throws HyracksDataException {
- FileInfo fInfo = fileRegistry.get(fileId);
- if (fInfo == null) {
- throw new HyracksDataException("File with id " + fileId + " not in registry");
- }
- return fInfo;
- }
-
- public void close() {
- for (FileInfo fInfo : fileRegistry.values()) {
- try {
- fInfo.getFileChannel().close();
- } catch (IOException e) {
- e.printStackTrace();
- }
- }
- }
-}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/IFileMapManager.java b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/IFileMapManager.java
new file mode 100644
index 0000000..d37e576
--- /dev/null
+++ b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/IFileMapManager.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.storage.common.file;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * Maintains the mapping between file names and file ids.
+ *
+ * @author vinayakb
+ */
+public interface IFileMapManager extends IFileMapProvider {
+ /**
+ * Register a new file name.
+ *
+ * @param fileName
+ * - the name of the file to register
+ * @throws HyracksDataException
+ * - if a mapping for the file already exists.
+ */
+ public void registerFile(String fileName) throws HyracksDataException;
+
+ /**
+ * Unregister a file mapping
+ *
+ * @param fileId
+ * - The file id whose mapping is to be unregistered.
+ * @throws HyracksDataException
+ * - If the file id is not mapped currently in this manager.
+ */
+ public void unregisterFile(int fileId) throws HyracksDataException;
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/IFileMapProvider.java b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/IFileMapProvider.java
new file mode 100644
index 0000000..38d9f8c
--- /dev/null
+++ b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/IFileMapProvider.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.storage.common.file;
+
+import java.io.Serializable;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public interface IFileMapProvider extends Serializable {
+ /**
+ * Indicates if a given fileId is mapped
+ *
+ * @param fileId
+ * @return <code>true</code> if the given fileId is mapped, <code>false</code> otherwise.
+ */
+ public boolean isMapped(int fileId);
+
+ /**
+ * Indicates if a given file name is mapped.
+ *
+ * @param fileName
+ * @return <code>true</code> if the given file name is mapped, <code>false</code> otherwise.
+ */
+ public boolean isMapped(String fileName);
+
+ /**
+ * Lookup the file id for a file name
+ *
+ * @param fileName
+ * - The file name whose id should be looked up.
+ * @return The file id
+ * @throws HyracksDataException
+ * - If the file name is not currently mapped in this manager.
+ */
+ public int lookupFileId(String fileName) throws HyracksDataException;
+
+ /**
+ * Lookup the file name for a file id
+ *
+ * @param fileId
+ * - The file id whose name should be looked up.
+ * @return The file name
+ * @throws HyracksDataException
+ * - If the file id is not mapped currently in this manager.
+ */
+ public String lookupFileName(int fileId) throws HyracksDataException;
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/IFileMappingProvider.java b/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/IFileMappingProvider.java
deleted file mode 100644
index 6fb44a1..0000000
--- a/hyracks/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/IFileMappingProvider.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.common.file;
-
-import java.io.Serializable;
-
-public interface IFileMappingProvider extends Serializable {
- /**
- * Provide the mapping from a file name to an integer id.
- *
- * @param name
- * - Name of the file
- * @param create
- * - Indicate if a new mapping should be created if one does not exist
- * @return The file id on a successful lookup, null if unsuccessful.
- */
- public Integer mapNameToFileId(String name, boolean create);
-
- /**
- * Remove the mapping from a file name to an integer id.
- *
- * @param name
- * - Name of the file
- *
- * @return void
- */
- public void unmapName(String name);
-
- /**
- * Get file id of an already mapped file
- *
- * @param name
- * - Name of the file
- *
- * @return The file id on a successful lookup, null if unsuccessful.
- */
- public Integer getFileId(String name);
-}
\ No newline at end of file