Merged the hyracks_indexes branch into hyracks_dev_next
git-svn-id: https://hyracks.googlecode.com/svn/branches/hyracks_dev_next@529 123451ca-8445-de46-9d55-352943316053
diff --git a/hyracks-cli/.classpath b/hyracks-cli/.classpath
index ba0bb5a..93843eb 100644
--- a/hyracks-cli/.classpath
+++ b/hyracks-cli/.classpath
@@ -1,7 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" output="target/classes" path="src/main/java"/>
- <classpathentry kind="src" path="target/generated-sources/javacc"/>
+ <classpathentry kind="src" output="target/classes" path="target/generated-sources/javacc"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
<classpathentry kind="output" path="target/classes"/>
diff --git a/hyracks-cli/.settings/org.eclipse.jdt.core.prefs b/hyracks-cli/.settings/org.eclipse.jdt.core.prefs
index f7947fa..a80ec7b 100644
--- a/hyracks-cli/.settings/org.eclipse.jdt.core.prefs
+++ b/hyracks-cli/.settings/org.eclipse.jdt.core.prefs
@@ -1,4 +1,4 @@
-#Thu Aug 05 10:16:23 PDT 2010
+#Fri May 20 19:34:08 PDT 2011
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.compliance=1.6
diff --git a/hyracks-control-cc/.settings/org.eclipse.jdt.core.prefs b/hyracks-control-cc/.settings/org.eclipse.jdt.core.prefs
index 1b67f41..375e12e 100644
--- a/hyracks-control-cc/.settings/org.eclipse.jdt.core.prefs
+++ b/hyracks-control-cc/.settings/org.eclipse.jdt.core.prefs
@@ -1,4 +1,4 @@
-#Fri Jul 30 17:52:26 PDT 2010
+#Fri May 20 19:34:07 PDT 2011
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.compliance=1.6
diff --git a/hyracks-control-common/.settings/org.eclipse.jdt.core.prefs b/hyracks-control-common/.settings/org.eclipse.jdt.core.prefs
index 9387c03..450f5c4 100644
--- a/hyracks-control-common/.settings/org.eclipse.jdt.core.prefs
+++ b/hyracks-control-common/.settings/org.eclipse.jdt.core.prefs
@@ -1,4 +1,4 @@
-#Fri Jul 30 07:32:49 PDT 2010
+#Fri May 20 19:34:04 PDT 2011
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.compliance=1.6
diff --git a/hyracks-control-nc/.settings/org.eclipse.jdt.core.prefs b/hyracks-control-nc/.settings/org.eclipse.jdt.core.prefs
index 05fa00c..dfac000 100644
--- a/hyracks-control-nc/.settings/org.eclipse.jdt.core.prefs
+++ b/hyracks-control-nc/.settings/org.eclipse.jdt.core.prefs
@@ -1,4 +1,4 @@
-#Tue Sep 28 12:04:33 PDT 2010
+#Fri May 20 19:34:05 PDT 2011
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.compliance=1.6
diff --git a/hyracks-dataflow-common/.settings/org.eclipse.jdt.core.prefs b/hyracks-dataflow-common/.settings/org.eclipse.jdt.core.prefs
index 8496bf4..450f5c4 100644
--- a/hyracks-dataflow-common/.settings/org.eclipse.jdt.core.prefs
+++ b/hyracks-dataflow-common/.settings/org.eclipse.jdt.core.prefs
@@ -1,4 +1,4 @@
-#Thu Jul 29 14:49:35 PDT 2010
+#Fri May 20 19:34:04 PDT 2011
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.compliance=1.6
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/DoubleBinaryComparatorFactory.java b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/DoubleBinaryComparatorFactory.java
new file mode 100644
index 0000000..983dcb8
--- /dev/null
+++ b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/comparators/DoubleBinaryComparatorFactory.java
@@ -0,0 +1,25 @@
+package edu.uci.ics.hyracks.dataflow.common.data.comparators;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+
+public class DoubleBinaryComparatorFactory implements IBinaryComparatorFactory {
+ private static final long serialVersionUID = 1L;
+
+ public static final DoubleBinaryComparatorFactory INSTANCE = new DoubleBinaryComparatorFactory();
+
+ private DoubleBinaryComparatorFactory() {
+ }
+
+ @Override
+ public IBinaryComparator createBinaryComparator() {
+ return new IBinaryComparator() {
+ @Override
+ public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
+ return Double.compare(DoubleSerializerDeserializer.getDouble(b1, s1), DoubleSerializerDeserializer
+ .getDouble(b2, s2));
+ }
+ };
+ }
+}
\ No newline at end of file
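
For orientation, a minimal usage sketch of the new comparator factory (the class name DoubleComparatorExample is illustrative and not part of the patch). It assumes the comparator operates on the 8-byte big-endian layout that DoubleSerializerDeserializer, added below, produces; DataOutputStream.writeDouble yields the same layout.

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
import edu.uci.ics.hyracks.dataflow.common.data.comparators.DoubleBinaryComparatorFactory;

public class DoubleComparatorExample {
    public static void main(String[] args) throws IOException {
        byte[] smaller = serialize(1.5);
        byte[] larger = serialize(2.25);

        IBinaryComparator cmp = DoubleBinaryComparatorFactory.INSTANCE.createBinaryComparator();
        // prints a negative value: 1.5 sorts before 2.25
        System.out.println(cmp.compare(smaller, 0, smaller.length, larger, 0, larger.length));
    }

    // DataOutputStream.writeDouble produces the same 8-byte big-endian layout
    // that DoubleSerializerDeserializer writes and getDouble reads back.
    private static byte[] serialize(double d) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        new DataOutputStream(bos).writeDouble(d);
        return bos.toByteArray();
    }
}
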
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/DoubleSerializerDeserializer.java b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/DoubleSerializerDeserializer.java
new file mode 100644
index 0000000..82c80e6
--- /dev/null
+++ b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/DoubleSerializerDeserializer.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.dataflow.common.data.marshalling;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public class DoubleSerializerDeserializer implements ISerializerDeserializer<Double> {
+ private static final long serialVersionUID = 1L;
+
+ public static final DoubleSerializerDeserializer INSTANCE = new DoubleSerializerDeserializer();
+
+ private DoubleSerializerDeserializer() {
+ }
+
+ @Override
+ public Double deserialize(DataInput in) throws HyracksDataException {
+ try {
+ return in.readDouble();
+ } catch (IOException e) {
+ throw new HyracksDataException(e);
+ }
+ }
+
+ @Override
+ public void serialize(Double instance, DataOutput out) throws HyracksDataException {
+ try {
+ out.writeDouble(instance.doubleValue());
+ } catch (IOException e) {
+ throw new HyracksDataException(e);
+ }
+ }
+
+ public static double getDouble(byte[] bytes, int offset) {
+ return Double.longBitsToDouble(getLongBits(bytes, offset));
+ }
+
+ public static int getIntBits(byte[] bytes, int offset) {
+ return IntegerSerializerDeserializer.getInt(bytes, offset);
+ }
+
+ public static long getLongBits(byte[] bytes, int offset) {
+ return Integer64SerializerDeserializer.getLong(bytes, offset);
+ }
+
+
+}
\ No newline at end of file
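
A round-trip sketch for the new serializer/deserializer (class name illustrative): serialize a double through the stream API, then read it back both through deserialize and through the offset-based getDouble accessor that the binary comparator relies on.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;

import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;

public class DoubleSerdeExample {
    public static void main(String[] args) throws Exception {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DoubleSerializerDeserializer.INSTANCE.serialize(3.14159, new DataOutputStream(bos));
        byte[] bytes = bos.toByteArray();

        // stream-based read back
        Double viaStream = DoubleSerializerDeserializer.INSTANCE.deserialize(
                new DataInputStream(new ByteArrayInputStream(bytes)));
        // offset-based read back, as used inside DoubleBinaryComparatorFactory
        double viaOffset = DoubleSerializerDeserializer.getDouble(bytes, 0);

        System.out.println(viaStream + " " + viaOffset); // both print 3.14159
    }
}
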
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/Integer64SerializerDeserializer.java b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/Integer64SerializerDeserializer.java
new file mode 100644
index 0000000..effc092
--- /dev/null
+++ b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/marshalling/Integer64SerializerDeserializer.java
@@ -0,0 +1,44 @@
+package edu.uci.ics.hyracks.dataflow.common.data.marshalling;
+
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public class Integer64SerializerDeserializer implements ISerializerDeserializer<Long> {
+
+ private static final long serialVersionUID = 1L;
+
+ public static final Integer64SerializerDeserializer INSTANCE = new Integer64SerializerDeserializer();
+
+ private Integer64SerializerDeserializer() {
+ }
+
+ @Override
+ public Long deserialize(DataInput in) throws HyracksDataException {
+ try {
+ return in.readLong();
+ } catch (IOException e) {
+ throw new HyracksDataException(e);
+ }
+ }
+
+ @Override
+ public void serialize(Long instance, DataOutput out) throws HyracksDataException {
+ try {
+ out.writeLong(instance.longValue());
+ } catch (IOException ioe) {
+ throw new HyracksDataException(ioe);
+ }
+ }
+
+ public static long getLong(byte[] bytes, int offset) {
+ return (((long) (bytes[offset] & 0xff)) << 56) + (((long) (bytes[offset + 1] & 0xff)) << 48)
+ + (((long) (bytes[offset + 2] & 0xff)) << 40) + (((long) (bytes[offset + 3] & 0xff)) << 32)
+ + (((long) (bytes[offset + 4] & 0xff)) << 24) + (((long) (bytes[offset + 5] & 0xff)) << 16)
+ + (((long) (bytes[offset + 6] & 0xff)) << 8) + (((long) (bytes[offset + 7] & 0xff)) << 0);
+ }
+
+}
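
A quick check (names here are illustrative) of the byte-shift arithmetic in getLong: it reassembles eight big-endian bytes into a long, so it agrees with java.nio.ByteBuffer's default big-endian decoding.

import java.nio.ByteBuffer;

import edu.uci.ics.hyracks.dataflow.common.data.marshalling.Integer64SerializerDeserializer;

public class Integer64GetLongExample {
    public static void main(String[] args) {
        // eight bytes in big-endian order, most significant byte first
        byte[] bytes = ByteBuffer.allocate(8).putLong(0x0102030405060708L).array();

        long viaShifts = Integer64SerializerDeserializer.getLong(bytes, 0);
        long viaBuffer = ByteBuffer.wrap(bytes).getLong();

        System.out.println(viaShifts == viaBuffer); // true: same big-endian decoding
    }
}
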
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/util/StringUtils.java b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/util/StringUtils.java
index dbe6386..d11245b 100644
--- a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/util/StringUtils.java
+++ b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/data/util/StringUtils.java
@@ -14,56 +14,199 @@
*/
package edu.uci.ics.hyracks.dataflow.common.data.util;
+import java.io.DataOutput;
+import java.io.IOException;
+
public class StringUtils {
- public static int charSize(byte[] b, int s) {
- int c = (int) b[s] & 0xff;
- switch (c >> 4) {
- case 0:
- case 1:
- case 2:
- case 3:
- case 4:
- case 5:
- case 6:
- case 7:
- return 1;
+ public static char charAt(byte[] b, int s) {
+ int c = b[s] & 0xff;
+ switch (c >> 4) {
+ case 0:
+ case 1:
+ case 2:
+ case 3:
+ case 4:
+ case 5:
+ case 6:
+ case 7:
+ return (char) c;
- case 12:
- case 13:
- return 2;
+ case 12:
+ case 13:
+ return (char) (((c & 0x1F) << 6) | ((b[s + 1]) & 0x3F));
- case 14:
- return 3;
- }
- throw new IllegalStateException();
- }
+ case 14:
+ return (char) (((c & 0x0F) << 12) | (((b[s + 1]) & 0x3F) << 6) | (((b[s + 2]) & 0x3F) << 0));
- public static char charAt(byte[] b, int s) {
- int c = (int) b[s] & 0xff;
- switch (c >> 4) {
- case 0:
- case 1:
- case 2:
- case 3:
- case 4:
- case 5:
- case 6:
- case 7:
- return (char) c;
+ default:
+ throw new IllegalArgumentException();
+ }
+ }
- case 12:
- case 13:
- return (char) (((c & 0x1F) << 6) | (((int) b[s + 1]) & 0x3F));
+ public static int charSize(byte[] b, int s) {
+ int c = b[s] & 0xff;
+ switch (c >> 4) {
+ case 0:
+ case 1:
+ case 2:
+ case 3:
+ case 4:
+ case 5:
+ case 6:
+ case 7:
+ return 1;
- case 14:
- return (char) (((c & 0x0F) << 12) | ((((int) b[s + 1]) & 0x3F) << 6) | ((((int) b[s + 2]) & 0x3F) << 0));
+ case 12:
+ case 13:
+ return 2;
- default:
- throw new IllegalArgumentException();
- }
- }
+ case 14:
+ return 3;
+ }
+ throw new IllegalStateException();
+ }
- public static int getUTFLen(byte[] b, int s) {
- return ((b[s] & 0xff) << 8) + ((b[s + 1] & 0xff) << 0);
- }
+ public static int getModifiedUTF8Len(char c) {
+ if (c >= 0x0000 && c <= 0x007F) {
+ return 1;
+ } else if (c <= 0x07FF) {
+ return 2;
+ } else {
+ return 3;
+ }
+ }
+
+ public static int getStrLen(byte[] b, int s) {
+ int pos = s + 2;
+ int end = pos + getUTFLen(b, s);
+ int charCount = 0;
+ while (pos < end) {
+ charCount++;
+ pos += charSize(b, pos);
+ }
+ return charCount;
+ }
+
+ public static int getUTFLen(byte[] b, int s) {
+ return ((b[s] & 0xff) << 8) + ((b[s + 1] & 0xff) << 0);
+ }
+
+ public static char toLowerCase(char c) {
+ switch (c) {
+ case 'A':
+ return 'a';
+ case 'B':
+ return 'b';
+ case 'C':
+ return 'c';
+ case 'D':
+ return 'd';
+ case 'E':
+ return 'e';
+ case 'F':
+ return 'f';
+ case 'G':
+ return 'g';
+ case 'H':
+ return 'h';
+ case 'I':
+ return 'i';
+ case 'J':
+ return 'j';
+ case 'K':
+ return 'k';
+ case 'L':
+ return 'l';
+ case 'M':
+ return 'm';
+ case 'N':
+ return 'n';
+ case 'O':
+ return 'o';
+ case 'P':
+ return 'p';
+ case 'Q':
+ return 'q';
+ case 'R':
+ return 'r';
+ case 'S':
+ return 's';
+ case 'T':
+ return 't';
+ case 'U':
+ return 'u';
+ case 'V':
+ return 'v';
+ case 'W':
+ return 'w';
+ case 'X':
+ return 'x';
+ case 'Y':
+ return 'y';
+ case 'Z':
+ return 'z';
+ case 'Ä':
+ return 'ä';
+ case 'Ǟ':
+ return 'ǟ';
+ case 'Ë':
+ return 'ë';
+ case 'Ḧ':
+ return 'ḧ';
+ case 'Ï':
+ return 'ï';
+ case 'Ḯ':
+ return 'ḯ';
+ case 'Ö':
+ return 'ö';
+ case 'Ȫ':
+ return 'ȫ';
+ case 'Ṏ':
+ return 'ṏ';
+ case 'Ü':
+ return 'ü';
+ case 'Ǖ':
+ return 'ǖ';
+ case 'Ǘ':
+ return 'ǘ';
+ case 'Ǚ':
+ return 'ǚ';
+ case 'Ǜ':
+ return 'ǜ';
+ case 'Ṳ':
+ return 'ṳ';
+ case 'Ṻ':
+ return 'ṻ';
+ case 'Ẅ':
+ return 'ẅ';
+ case 'Ẍ':
+ return 'ẍ';
+ case 'Ÿ':
+ return 'ÿ';
+ default:
+ // since I probably missed some chars above
+ // use Java to convert to lower case to be safe
+ return Character.toLowerCase(c);
+ }
+ }
+
+ public static void writeCharAsModifiedUTF8(char c, DataOutput dos)
+ throws IOException {
+
+ if (c >= 0x0000 && c <= 0x007F) {
+ dos.writeByte(c);
+ } else if (c <= 0x07FF) {
+ dos.writeByte((byte) (0xC0 | ((c >> 6) & 0x3F)));
+ dos.writeByte((byte) (0x80 | (c & 0x3F)));
+ } else {
+ dos.writeByte((byte) (0xE0 | ((c >> 12) & 0x0F)));
+ dos.writeByte((byte) (0x80 | ((c >> 6) & 0x3F)));
+ dos.writeByte((byte) (0x80 | (c & 0x3F)));
+ }
+ }
+
+ public static void writeUTF8Len(int len, DataOutput dos) throws IOException {
+ dos.write((len >>> 8) & 0xFF);
+ dos.write((len >>> 0) & 0xFF);
+ }
}
\ No newline at end of file
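
A small round-trip sketch for the new StringUtils helpers (class name illustrative): the string is written as a 2-byte length header followed by modified-UTF-8 characters, mirroring the layout of DataOutput.writeUTF, and then read back with the offset-based accessors.

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import edu.uci.ics.hyracks.dataflow.common.data.util.StringUtils;

public class StringUtilsExample {
    public static void main(String[] args) throws IOException {
        String s = "Häuser";

        // compute the modified-UTF-8 byte length, then write the 2-byte header
        // followed by each character
        int utfLen = 0;
        for (int i = 0; i < s.length(); i++) {
            utfLen += StringUtils.getModifiedUTF8Len(s.charAt(i));
        }
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream dos = new DataOutputStream(bos);
        StringUtils.writeUTF8Len(utfLen, dos);
        for (int i = 0; i < s.length(); i++) {
            StringUtils.writeCharAsModifiedUTF8(s.charAt(i), dos);
        }
        byte[] b = bos.toByteArray();

        // decode the characters back using the offset-based helpers
        StringBuilder decoded = new StringBuilder();
        int pos = 2;
        int end = pos + StringUtils.getUTFLen(b, 0);
        while (pos < end) {
            decoded.append(StringUtils.charAt(b, pos));
            pos += StringUtils.charSize(b, pos);
        }
        System.out.println(decoded + " " + StringUtils.getStrLen(b, 0)); // Häuser 6
    }
}
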
diff --git a/hyracks-dataflow-std/.settings/org.eclipse.jdt.core.prefs b/hyracks-dataflow-std/.settings/org.eclipse.jdt.core.prefs
index 1ca1bd4..450f5c4 100644
--- a/hyracks-dataflow-std/.settings/org.eclipse.jdt.core.prefs
+++ b/hyracks-dataflow-std/.settings/org.eclipse.jdt.core.prefs
@@ -1,4 +1,4 @@
-#Thu Jul 29 14:32:56 PDT 2010
+#Fri May 20 19:34:04 PDT 2011
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.compliance=1.6
diff --git a/hyracks-documentation/.classpath b/hyracks-documentation/.classpath
index 3f62785..d0bec0f 100644
--- a/hyracks-documentation/.classpath
+++ b/hyracks-documentation/.classpath
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
- <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/J2SE-1.4"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/J2SE-1.5"/>
<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
<classpathentry kind="output" path="target/classes"/>
</classpath>
diff --git a/hyracks-documentation/.settings/org.eclipse.jdt.core.prefs b/hyracks-documentation/.settings/org.eclipse.jdt.core.prefs
index 15a7e04..75a3d16 100644
--- a/hyracks-documentation/.settings/org.eclipse.jdt.core.prefs
+++ b/hyracks-documentation/.settings/org.eclipse.jdt.core.prefs
@@ -1,6 +1,6 @@
-#Tue Oct 19 13:07:01 PDT 2010
+#Thu Aug 04 11:50:30 PDT 2011
eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.4
-org.eclipse.jdt.core.compiler.compliance=1.4
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.5
+org.eclipse.jdt.core.compiler.compliance=1.5
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.4
+org.eclipse.jdt.core.compiler.source=1.5
diff --git a/hyracks-examples/btree-example/btreeapp/.classpath b/hyracks-examples/btree-example/btreeapp/.classpath
index 3f62785..d0bec0f 100644
--- a/hyracks-examples/btree-example/btreeapp/.classpath
+++ b/hyracks-examples/btree-example/btreeapp/.classpath
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
- <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/J2SE-1.4"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/J2SE-1.5"/>
<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
<classpathentry kind="output" path="target/classes"/>
</classpath>
diff --git a/hyracks-examples/btree-example/btreeapp/.settings/org.eclipse.jdt.core.prefs b/hyracks-examples/btree-example/btreeapp/.settings/org.eclipse.jdt.core.prefs
index fdf1b32..75a3d16 100644
--- a/hyracks-examples/btree-example/btreeapp/.settings/org.eclipse.jdt.core.prefs
+++ b/hyracks-examples/btree-example/btreeapp/.settings/org.eclipse.jdt.core.prefs
@@ -1,6 +1,6 @@
-#Wed Oct 06 08:06:49 PDT 2010
+#Thu Aug 04 11:50:30 PDT 2011
eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.4
-org.eclipse.jdt.core.compiler.compliance=1.4
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.5
+org.eclipse.jdt.core.compiler.compliance=1.5
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.4
+org.eclipse.jdt.core.compiler.source=1.5
diff --git a/hyracks-examples/btree-example/btreeclient/.settings/org.eclipse.jdt.core.prefs b/hyracks-examples/btree-example/btreeclient/.settings/org.eclipse.jdt.core.prefs
index 7b596e8..7cf8ad6 100644
--- a/hyracks-examples/btree-example/btreeclient/.settings/org.eclipse.jdt.core.prefs
+++ b/hyracks-examples/btree-example/btreeclient/.settings/org.eclipse.jdt.core.prefs
@@ -1,4 +1,4 @@
-#Mon Dec 20 19:05:17 PST 2010
+#Fri May 20 19:34:07 PDT 2011
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.compliance=1.6
diff --git a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/InsertPipelineExample.java b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/InsertPipelineExample.java
index b5468ad..7aeaf77 100644
--- a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/InsertPipelineExample.java
+++ b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/InsertPipelineExample.java
@@ -41,17 +41,19 @@
import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
import edu.uci.ics.hyracks.dataflow.std.misc.NullSinkOperatorDescriptor;
-import edu.uci.ics.hyracks.examples.btree.helper.BTreeRegistryProvider;
import edu.uci.ics.hyracks.examples.btree.helper.DataGenOperatorDescriptor;
import edu.uci.ics.hyracks.examples.btree.helper.StorageManagerInterface;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeInsertUpdateDeleteOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBTreeRegistryProvider;
-import edu.uci.ics.hyracks.storage.am.btree.frames.NSMInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.frames.NSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOp;
-import edu.uci.ics.hyracks.storage.am.btree.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.examples.btree.helper.TreeIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeOpHelperFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOpHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexInsertUpdateDeleteOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
// This example will insert tuples into the primary and secondary index using an insert pipeline
@@ -127,7 +129,7 @@
// run data generator on first nodecontroller given
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, dataGen, splitNCs[0]);
- IBTreeRegistryProvider btreeRegistryProvider = BTreeRegistryProvider.INSTANCE;
+ IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider = TreeIndexRegistryProvider.INSTANCE;
IStorageManagerInterface storageManager = StorageManagerInterface.INSTANCE;
// prepare insertion into primary index
@@ -141,8 +143,8 @@
// create factories and providers for secondary B-Tree
TypeAwareTupleWriterFactory primaryTupleWriterFactory = new TypeAwareTupleWriterFactory(primaryTypeTraits);
- IBTreeInteriorFrameFactory primaryInteriorFrameFactory = new NSMInteriorFrameFactory(primaryTupleWriterFactory);
- IBTreeLeafFrameFactory primaryLeafFrameFactory = new NSMLeafFrameFactory(primaryTupleWriterFactory);
+ ITreeIndexFrameFactory primaryInteriorFrameFactory = new BTreeNSMInteriorFrameFactory(primaryTupleWriterFactory);
+ ITreeIndexFrameFactory primaryLeafFrameFactory = new BTreeNSMLeafFrameFactory(primaryTupleWriterFactory);
// the B-Tree expects its keyfields to be at the front of its input
// tuple
@@ -154,11 +156,13 @@
primaryComparatorFactories[0] = IntegerBinaryComparatorFactory.INSTANCE;
IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
+ ITreeIndexOpHelperFactory opHelperFactory = new BTreeOpHelperFactory();
+
// create operator descriptor
- BTreeInsertUpdateDeleteOperatorDescriptor primaryInsert = new BTreeInsertUpdateDeleteOperatorDescriptor(spec,
- recDesc, storageManager, btreeRegistryProvider, primarySplitProvider, primaryInteriorFrameFactory,
+ TreeIndexInsertUpdateDeleteOperatorDescriptor primaryInsert = new TreeIndexInsertUpdateDeleteOperatorDescriptor(spec,
+ recDesc, storageManager, treeIndexRegistryProvider, primarySplitProvider, primaryInteriorFrameFactory,
primaryLeafFrameFactory, primaryTypeTraits, primaryComparatorFactories, primaryFieldPermutation,
- BTreeOp.BTO_INSERT);
+ IndexOp.INSERT, opHelperFactory);
JobHelper.createPartitionConstraint(spec, primaryInsert, splitNCs);
// prepare insertion into secondary index
@@ -170,9 +174,9 @@
// create factories and providers for secondary B-Tree
TypeAwareTupleWriterFactory secondaryTupleWriterFactory = new TypeAwareTupleWriterFactory(secondaryTypeTraits);
- IBTreeInteriorFrameFactory secondaryInteriorFrameFactory = new NSMInteriorFrameFactory(
+ ITreeIndexFrameFactory secondaryInteriorFrameFactory = new BTreeNSMInteriorFrameFactory(
secondaryTupleWriterFactory);
- IBTreeLeafFrameFactory secondaryLeafFrameFactory = new NSMLeafFrameFactory(secondaryTupleWriterFactory);
+ ITreeIndexFrameFactory secondaryLeafFrameFactory = new BTreeNSMLeafFrameFactory(secondaryTupleWriterFactory);
// the B-Tree expects its keyfields to be at the front of its input
// tuple
@@ -184,10 +188,10 @@
IFileSplitProvider secondarySplitProvider = JobHelper.createFileSplitProvider(splitNCs,
options.secondaryBTreeName);
// create operator descriptor
- BTreeInsertUpdateDeleteOperatorDescriptor secondaryInsert = new BTreeInsertUpdateDeleteOperatorDescriptor(spec,
- recDesc, storageManager, btreeRegistryProvider, secondarySplitProvider, secondaryInteriorFrameFactory,
+ TreeIndexInsertUpdateDeleteOperatorDescriptor secondaryInsert = new TreeIndexInsertUpdateDeleteOperatorDescriptor(spec,
+ recDesc, storageManager, treeIndexRegistryProvider, secondarySplitProvider, secondaryInteriorFrameFactory,
secondaryLeafFrameFactory, secondaryTypeTraits, secondaryComparatorFactories,
- secondaryFieldPermutation, BTreeOp.BTO_INSERT);
+ secondaryFieldPermutation, IndexOp.INSERT, opHelperFactory);
JobHelper.createPartitionConstraint(spec, secondaryInsert, splitNCs);
// end the insert pipeline at this sink operator
diff --git a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
index 204bdf5..1533714 100644
--- a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
+++ b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
@@ -39,16 +39,18 @@
import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
-import edu.uci.ics.hyracks.examples.btree.helper.BTreeRegistryProvider;
import edu.uci.ics.hyracks.examples.btree.helper.DataGenOperatorDescriptor;
import edu.uci.ics.hyracks.examples.btree.helper.StorageManagerInterface;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeBulkLoadOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBTreeRegistryProvider;
-import edu.uci.ics.hyracks.storage.am.btree.frames.NSMInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.frames.NSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.examples.btree.helper.TreeIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeOpHelperFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOpHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
// This example will load a primary index from randomly generated data
@@ -144,20 +146,21 @@
// create factories and providers for B-Tree
TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
- IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
- IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
- IBTreeRegistryProvider btreeRegistryProvider = BTreeRegistryProvider.INSTANCE;
+ ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
+ ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+ IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider = TreeIndexRegistryProvider.INSTANCE;
IStorageManagerInterface storageManager = StorageManagerInterface.INSTANCE;
// the B-Tree expects its keyfields to be at the front of its input
// tuple
int[] fieldPermutation = { 2, 1, 3, 4 }; // map field 2 of input tuple
// to field 0 of B-Tree tuple,
- // etc.
+ // etc.
IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.btreeName);
- BTreeBulkLoadOperatorDescriptor btreeBulkLoad = new BTreeBulkLoadOperatorDescriptor(spec, storageManager,
- btreeRegistryProvider, btreeSplitProvider, interiorFrameFactory, leafFrameFactory, typeTraits,
- comparatorFactories, fieldPermutation, 0.7f);
+ ITreeIndexOpHelperFactory opHelperFactory = new BTreeOpHelperFactory();
+ TreeIndexBulkLoadOperatorDescriptor btreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec, storageManager,
+ treeIndexRegistryProvider, btreeSplitProvider, interiorFrameFactory, leafFrameFactory, typeTraits,
+ comparatorFactories, fieldPermutation, 0.7f, opHelperFactory);
JobHelper.createPartitionConstraint(spec, btreeBulkLoad, splitNCs);
// distribute the records from the datagen via hashing to the bulk load
diff --git a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexEnlistFilesExample.java b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexEnlistFilesExample.java
index a7ee0dc..c991a89 100644
--- a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexEnlistFilesExample.java
+++ b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexEnlistFilesExample.java
@@ -31,15 +31,17 @@
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.examples.btree.helper.BTreeRegistryProvider;
import edu.uci.ics.hyracks.examples.btree.helper.StorageManagerInterface;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeFileEnlistmentOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBTreeRegistryProvider;
-import edu.uci.ics.hyracks.storage.am.btree.frames.NSMInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.frames.NSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.examples.btree.helper.TreeIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeOpHelperFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOpHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexFileEnlistmentOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
// This example will enlist existing files as primary index
@@ -99,18 +101,19 @@
// create factories and providers for B-Tree
TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
- IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
- IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
- IBTreeRegistryProvider btreeRegistryProvider = BTreeRegistryProvider.INSTANCE;
+ ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
+ ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+ IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider = TreeIndexRegistryProvider.INSTANCE;
IStorageManagerInterface storageManager = StorageManagerInterface.INSTANCE;
IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
comparatorFactories[0] = IntegerBinaryComparatorFactory.INSTANCE;
IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.btreeName);
- BTreeFileEnlistmentOperatorDescriptor fileEnlistmentOp = new BTreeFileEnlistmentOperatorDescriptor(spec,
- recDesc, storageManager, btreeRegistryProvider, btreeSplitProvider, interiorFrameFactory,
- leafFrameFactory, typeTraits, comparatorFactories);
+ ITreeIndexOpHelperFactory opHelperFactory = new BTreeOpHelperFactory();
+ TreeIndexFileEnlistmentOperatorDescriptor fileEnlistmentOp = new TreeIndexFileEnlistmentOperatorDescriptor(spec,
+ recDesc, storageManager, treeIndexRegistryProvider, btreeSplitProvider, interiorFrameFactory,
+ leafFrameFactory, typeTraits, comparatorFactories, opHelperFactory);
JobHelper.createPartitionConstraint(spec, fileEnlistmentOp, splitNCs);
spec.addRoot(fileEnlistmentOp);
diff --git a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexSearchExample.java b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
index b43efd2..7e4ae9d 100644
--- a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
+++ b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
@@ -37,15 +37,17 @@
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.misc.PrinterOperatorDescriptor;
-import edu.uci.ics.hyracks.examples.btree.helper.BTreeRegistryProvider;
import edu.uci.ics.hyracks.examples.btree.helper.StorageManagerInterface;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
+import edu.uci.ics.hyracks.examples.btree.helper.TreeIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeOpHelperFactory;
import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBTreeRegistryProvider;
-import edu.uci.ics.hyracks.storage.am.btree.frames.NSMInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.frames.NSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOpHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
// This example will perform an ordered scan on the primary index
@@ -101,9 +103,9 @@
// create factories and providers for B-Tree
TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
- IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
- IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
- IBTreeRegistryProvider btreeRegistryProvider = BTreeRegistryProvider.INSTANCE;
+ ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
+ ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+ IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider = TreeIndexRegistryProvider.INSTANCE;
IStorageManagerInterface storageManager = StorageManagerInterface.INSTANCE;
// schema of tuples coming out of primary index
@@ -144,9 +146,10 @@
// into search op
IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.btreeName);
+ ITreeIndexOpHelperFactory opHelperFactory = new BTreeOpHelperFactory();
BTreeSearchOperatorDescriptor btreeSearchOp = new BTreeSearchOperatorDescriptor(spec, recDesc, storageManager,
- btreeRegistryProvider, btreeSplitProvider, interiorFrameFactory, leafFrameFactory, typeTraits,
- comparatorFactories, true, lowKeyFields, highKeyFields, true, true);
+ treeIndexRegistryProvider, btreeSplitProvider, interiorFrameFactory, leafFrameFactory, typeTraits,
+ comparatorFactories, true, lowKeyFields, highKeyFields, true, true, opHelperFactory);
JobHelper.createPartitionConstraint(spec, btreeSearchOp, splitNCs);
// have each node print the results of its respective B-Tree
diff --git a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
index 4940c2a..b061811 100644
--- a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
+++ b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
@@ -34,16 +34,18 @@
import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
-import edu.uci.ics.hyracks.examples.btree.helper.BTreeRegistryProvider;
import edu.uci.ics.hyracks.examples.btree.helper.StorageManagerInterface;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeBulkLoadOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDiskOrderScanOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBTreeRegistryProvider;
-import edu.uci.ics.hyracks.storage.am.btree.frames.NSMInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.frames.NSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.examples.btree.helper.TreeIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeOpHelperFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOpHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexDiskOrderScanOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
// This example will load a secondary index with <key, primary-index key> pairs
@@ -95,8 +97,8 @@
JobSpecification spec = new JobSpecification();
String[] splitNCs = options.ncs.split(",");
-
- IBTreeRegistryProvider btreeRegistryProvider = BTreeRegistryProvider.INSTANCE;
+
+ IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider = TreeIndexRegistryProvider.INSTANCE;
IStorageManagerInterface storageManager = StorageManagerInterface.INSTANCE;
// schema of tuples that we are retrieving from the primary index
@@ -118,14 +120,15 @@
// create factories and providers for primary B-Tree
TypeAwareTupleWriterFactory primaryTupleWriterFactory = new TypeAwareTupleWriterFactory(primaryTypeTraits);
- IBTreeInteriorFrameFactory primaryInteriorFrameFactory = new NSMInteriorFrameFactory(primaryTupleWriterFactory);
- IBTreeLeafFrameFactory primaryLeafFrameFactory = new NSMLeafFrameFactory(primaryTupleWriterFactory);
+ ITreeIndexFrameFactory primaryInteriorFrameFactory = new BTreeNSMInteriorFrameFactory(primaryTupleWriterFactory);
+ ITreeIndexFrameFactory primaryLeafFrameFactory = new BTreeNSMLeafFrameFactory(primaryTupleWriterFactory);
// use a disk-order scan to read primary index
IFileSplitProvider primarySplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.primaryBTreeName);
- BTreeDiskOrderScanOperatorDescriptor btreeScanOp = new BTreeDiskOrderScanOperatorDescriptor(spec, recDesc,
- storageManager, btreeRegistryProvider, primarySplitProvider, primaryInteriorFrameFactory,
- primaryLeafFrameFactory, primaryTypeTraits);
+ ITreeIndexOpHelperFactory opHelperFactory = new BTreeOpHelperFactory();
+ TreeIndexDiskOrderScanOperatorDescriptor btreeScanOp = new TreeIndexDiskOrderScanOperatorDescriptor(spec, recDesc,
+ storageManager, treeIndexRegistryProvider, primarySplitProvider, primaryInteriorFrameFactory,
+ primaryLeafFrameFactory, primaryTypeTraits, opHelperFactory);
JobHelper.createPartitionConstraint(spec, btreeScanOp, splitNCs);
// sort the tuples as preparation for bulk load into secondary index
@@ -147,17 +150,17 @@
// create factories and providers for secondary B-Tree
TypeAwareTupleWriterFactory secondaryTupleWriterFactory = new TypeAwareTupleWriterFactory(secondaryTypeTraits);
- IBTreeInteriorFrameFactory secondaryInteriorFrameFactory = new NSMInteriorFrameFactory(
+ ITreeIndexFrameFactory secondaryInteriorFrameFactory = new BTreeNSMInteriorFrameFactory(
secondaryTupleWriterFactory);
- IBTreeLeafFrameFactory secondaryLeafFrameFactory = new NSMLeafFrameFactory(secondaryTupleWriterFactory);
+ ITreeIndexFrameFactory secondaryLeafFrameFactory = new BTreeNSMLeafFrameFactory(secondaryTupleWriterFactory);
// the B-Tree expects its keyfields to be at the front of its input
// tuple
int[] fieldPermutation = { 1, 0 };
IFileSplitProvider btreeSplitProvider = JobHelper.createFileSplitProvider(splitNCs, options.secondaryBTreeName);
- BTreeBulkLoadOperatorDescriptor btreeBulkLoad = new BTreeBulkLoadOperatorDescriptor(spec, storageManager,
- btreeRegistryProvider, btreeSplitProvider, secondaryInteriorFrameFactory, secondaryLeafFrameFactory,
- secondaryTypeTraits, comparatorFactories, fieldPermutation, 0.7f);
+ TreeIndexBulkLoadOperatorDescriptor btreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec, storageManager,
+ treeIndexRegistryProvider, btreeSplitProvider, secondaryInteriorFrameFactory, secondaryLeafFrameFactory,
+ secondaryTypeTraits, comparatorFactories, fieldPermutation, 0.7f, opHelperFactory);
JobHelper.createPartitionConstraint(spec, btreeBulkLoad, splitNCs);
// connect the ops
diff --git a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexSearchExample.java b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
index a770f09..5e5eaeb 100644
--- a/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
+++ b/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
@@ -37,15 +37,17 @@
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.misc.PrinterOperatorDescriptor;
-import edu.uci.ics.hyracks.examples.btree.helper.BTreeRegistryProvider;
import edu.uci.ics.hyracks.examples.btree.helper.StorageManagerInterface;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
+import edu.uci.ics.hyracks.examples.btree.helper.TreeIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeOpHelperFactory;
import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBTreeRegistryProvider;
-import edu.uci.ics.hyracks.storage.am.btree.frames.NSMInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.frames.NSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOpHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
// This example will perform range search on the secondary index
@@ -95,7 +97,7 @@
String[] splitNCs = options.ncs.split(",");
- IBTreeRegistryProvider btreeRegistryProvider = BTreeRegistryProvider.INSTANCE;
+ IIndexRegistryProvider<ITreeIndex> btreeRegistryProvider = TreeIndexRegistryProvider.INSTANCE;
IStorageManagerInterface storageManager = StorageManagerInterface.INSTANCE;
// schema of tuples coming out of secondary index
@@ -109,9 +111,9 @@
// create factories and providers for secondary B-Tree
TypeAwareTupleWriterFactory secondaryTupleWriterFactory = new TypeAwareTupleWriterFactory(secondaryTypeTraits);
- IBTreeInteriorFrameFactory secondaryInteriorFrameFactory = new NSMInteriorFrameFactory(
+ ITreeIndexFrameFactory secondaryInteriorFrameFactory = new BTreeNSMInteriorFrameFactory(
secondaryTupleWriterFactory);
- IBTreeLeafFrameFactory secondaryLeafFrameFactory = new NSMLeafFrameFactory(secondaryTupleWriterFactory);
+ ITreeIndexFrameFactory secondaryLeafFrameFactory = new BTreeNSMLeafFrameFactory(secondaryTupleWriterFactory);
// schema of tuples coming out of primary index
RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
@@ -127,8 +129,8 @@
// create factories and providers for secondary B-Tree
TypeAwareTupleWriterFactory primaryTupleWriterFactory = new TypeAwareTupleWriterFactory(primaryTypeTraits);
- IBTreeInteriorFrameFactory primaryInteriorFrameFactory = new NSMInteriorFrameFactory(primaryTupleWriterFactory);
- IBTreeLeafFrameFactory primaryLeafFrameFactory = new NSMLeafFrameFactory(primaryTupleWriterFactory);
+ ITreeIndexFrameFactory primaryInteriorFrameFactory = new BTreeNSMInteriorFrameFactory(primaryTupleWriterFactory);
+ ITreeIndexFrameFactory primaryLeafFrameFactory = new BTreeNSMLeafFrameFactory(primaryTupleWriterFactory);
// comparators for btree, note that we only need a comparator for the
// non-unique key
@@ -169,10 +171,11 @@
IFileSplitProvider secondarySplitProvider = JobHelper.createFileSplitProvider(splitNCs,
options.secondaryBTreeName);
+ ITreeIndexOpHelperFactory opHelperFactory = new BTreeOpHelperFactory();
BTreeSearchOperatorDescriptor secondarySearchOp = new BTreeSearchOperatorDescriptor(spec, secondaryRecDesc,
storageManager, btreeRegistryProvider, secondarySplitProvider, secondaryInteriorFrameFactory,
secondaryLeafFrameFactory, secondaryTypeTraits, comparatorFactories, true, secondaryLowKeyFields,
- secondaryHighKeyFields, true, true);
+ secondaryHighKeyFields, true, true, opHelperFactory);
JobHelper.createPartitionConstraint(spec, secondarySearchOp, splitNCs);
// secondary index will output tuples with [UTF8String, Integer]
@@ -188,7 +191,7 @@
BTreeSearchOperatorDescriptor primarySearchOp = new BTreeSearchOperatorDescriptor(spec, primaryRecDesc,
storageManager, btreeRegistryProvider, primarySplitProvider, primaryInteriorFrameFactory,
primaryLeafFrameFactory, primaryTypeTraits, comparatorFactories, true, primaryLowKeyFields,
- primaryHighKeyFields, true, true);
+ primaryHighKeyFields, true, true, opHelperFactory);
JobHelper.createPartitionConstraint(spec, primarySearchOp, splitNCs);
// have each node print the results of its respective B-Tree
diff --git a/hyracks-examples/btree-example/btreehelper/.settings/org.eclipse.jdt.core.prefs b/hyracks-examples/btree-example/btreehelper/.settings/org.eclipse.jdt.core.prefs
index 367ddc4..dfac000 100644
--- a/hyracks-examples/btree-example/btreehelper/.settings/org.eclipse.jdt.core.prefs
+++ b/hyracks-examples/btree-example/btreehelper/.settings/org.eclipse.jdt.core.prefs
@@ -1,4 +1,4 @@
-#Wed Oct 06 08:06:49 PDT 2010
+#Fri May 20 19:34:05 PDT 2011
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.compliance=1.6
diff --git a/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/BTreeRegistryProvider.java b/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/BTreeRegistryProvider.java
deleted file mode 100644
index 57b10a3..0000000
--- a/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/BTreeRegistryProvider.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.examples.btree.helper;
-
-import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeRegistry;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBTreeRegistryProvider;
-
-public class BTreeRegistryProvider implements IBTreeRegistryProvider {
- private static final long serialVersionUID = 1L;
-
- public static final BTreeRegistryProvider INSTANCE = new BTreeRegistryProvider();
-
- private BTreeRegistryProvider() {
- }
-
- @Override
- public BTreeRegistry getBTreeRegistry(IHyracksStageletContext ctx) {
- return RuntimeContext.get(ctx).getBTreeRegistry();
- }
-}
\ No newline at end of file
diff --git a/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/RuntimeContext.java b/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/RuntimeContext.java
index 9f99468..9fd7df4 100644
--- a/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/RuntimeContext.java
+++ b/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/RuntimeContext.java
@@ -17,7 +17,8 @@
import edu.uci.ics.hyracks.api.application.INCApplicationContext;
import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeRegistry;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexRegistry;
import edu.uci.ics.hyracks.storage.common.buffercache.BufferCache;
import edu.uci.ics.hyracks.storage.common.buffercache.ClockPageReplacementStrategy;
import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
@@ -29,7 +30,7 @@
import edu.uci.ics.hyracks.storage.common.smi.TransientFileMapManager;
public class RuntimeContext {
- private BTreeRegistry btreeRegistry;
+ private IndexRegistry<ITreeIndex> treeIndexRegistry;
private IBufferCache bufferCache;
private IFileMapManager fileMapManager;
@@ -37,8 +38,8 @@
fileMapManager = new TransientFileMapManager();
ICacheMemoryAllocator allocator = new HeapBufferAllocator();
IPageReplacementStrategy prs = new ClockPageReplacementStrategy();
- bufferCache = new BufferCache(appCtx.getRootContext().getIOManager(), allocator, prs, fileMapManager, 32768, 50);
- btreeRegistry = new BTreeRegistry();
+ bufferCache = new BufferCache(appCtx.getRootContext().getIOManager(), allocator, prs, fileMapManager, 32768, 50, 100);
+ treeIndexRegistry = new IndexRegistry<ITreeIndex>();
}
public void close() {
@@ -53,8 +54,8 @@
return fileMapManager;
}
- public BTreeRegistry getBTreeRegistry() {
- return btreeRegistry;
+ public IndexRegistry<ITreeIndex> getTreeIndexRegistry() {
+ return treeIndexRegistry;
}
public static RuntimeContext get(IHyracksStageletContext ctx) {
diff --git a/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/TreeIndexRegistryProvider.java b/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/TreeIndexRegistryProvider.java
new file mode 100644
index 0000000..6a6ddf8
--- /dev/null
+++ b/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/TreeIndexRegistryProvider.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.examples.btree.helper;
+
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexRegistry;
+
+public class TreeIndexRegistryProvider implements IIndexRegistryProvider<ITreeIndex> {
+ private static final long serialVersionUID = 1L;
+
+ public static final TreeIndexRegistryProvider INSTANCE = new TreeIndexRegistryProvider();
+
+ private TreeIndexRegistryProvider() {
+ }
+
+ @Override
+ public IndexRegistry<ITreeIndex> getRegistry(IHyracksStageletContext ctx) {
+ return RuntimeContext.get(ctx).getTreeIndexRegistry();
+ }
+}
\ No newline at end of file
diff --git a/hyracks-examples/hyracks-integration-tests/.settings/org.eclipse.jdt.core.prefs b/hyracks-examples/hyracks-integration-tests/.settings/org.eclipse.jdt.core.prefs
index e5faa3a..a80ec7b 100644
--- a/hyracks-examples/hyracks-integration-tests/.settings/org.eclipse.jdt.core.prefs
+++ b/hyracks-examples/hyracks-integration-tests/.settings/org.eclipse.jdt.core.prefs
@@ -1,4 +1,4 @@
-#Thu Jul 29 15:30:35 PDT 2010
+#Fri May 20 19:34:08 PDT 2011
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.compliance=1.6
diff --git a/hyracks-examples/hyracks-integration-tests/data/orders-with-locations.txt b/hyracks-examples/hyracks-integration-tests/data/orders-with-locations.txt
new file mode 100644
index 0000000..f936a6e
--- /dev/null
+++ b/hyracks-examples/hyracks-integration-tests/data/orders-with-locations.txt
@@ -0,0 +1,750 @@
+1|37|O|131251.81|1996-01-02|5-LOW|Clerk#000000951|0|nstructions sleep furiously among |42.3631|-71.065|42.3631|-71.065|
+2|79|O|40183.29|1996-12-01|1-URGENT|Clerk#000000880|0| foxes. pending accounts at the pending, silent asymptot|42.1091|-70.696|42.1091|-70.696|
+3|124|F|160882.76|1993-10-14|5-LOW|Clerk#000000955|0|sly final accounts boost. carefully regular ideas cajole carefully. depos|40.8151|-73.0452|40.8151|-73.0452|
+4|137|O|31084.79|1995-10-11|5-LOW|Clerk#000000124|0|sits. slyly regular warthogs cajole. regular, regular theodolites acro|40.8151|-73.0452|40.8151|-73.0452|
+5|46|F|86615.25|1994-07-30|5-LOW|Clerk#000000925|0|quickly. bold deposits sleep slyly. packages use slyly|42.2481|-71.174|42.2481|-71.174|
+6|56|F|36468.55|1992-02-21|4-NOT SPECIFIED|Clerk#000000058|0|ggle. special, final requests are against the furiously specia|61.1201|-149.89|61.1201|-149.89|
+7|40|O|171488.73|1996-01-10|2-HIGH|Clerk#000000470|0|ly special requests |61.1501|-149.926|61.1501|-149.926|
+32|131|O|116923.00|1995-07-16|2-HIGH|Clerk#000000616|0|ise blithely bold, regular requests. quickly unusual dep|61.181|-149.814|61.181|-149.814|
+33|67|F|99798.76|1993-10-27|3-MEDIUM|Clerk#000000409|0|uriously. furiously final request|61.1517|-149.86|61.1517|-149.86|
+34|62|O|41670.02|1998-07-21|3-MEDIUM|Clerk#000000223|0|ly final packages. fluffily final deposits wake blithely ideas. spe|61.1806|-149.814|61.1806|-149.814|
+35|128|O|148789.52|1995-10-23|4-NOT SPECIFIED|Clerk#000000259|0|zzle. carefully enticing deposits nag furio|61.1806|-149.775|61.1806|-149.775|
+36|116|O|38988.98|1995-11-03|1-URGENT|Clerk#000000358|0| quick packages are blithely. slyly silent accounts wake qu|61.1806|-149.775|61.1806|-149.775|
+37|88|F|113701.89|1992-06-03|3-MEDIUM|Clerk#000000456|0|kly regular pinto beans. carefully unusual waters cajole never|61.1806|-149.775|61.1806|-149.775|
+38|125|O|46366.56|1996-08-21|4-NOT SPECIFIED|Clerk#000000604|0|haggle blithely. furiously express ideas haggle blithely furiously regular re|61.2113|-149.824|61.2113|-149.824|
+39|82|O|219707.84|1996-09-20|3-MEDIUM|Clerk#000000659|0|ole express, ironic requests: ir|61.1967|-149.877|61.1967|-149.877|
+64|34|F|20065.73|1994-07-16|3-MEDIUM|Clerk#000000661|0|wake fluffily. sometimes ironic pinto beans about the dolphin|61.2164|-149.892|61.2164|-149.892|
+65|17|P|65883.92|1995-03-18|1-URGENT|Clerk#000000632|0|ular requests are blithely pending orbits-- even requests against the deposit|61.1571|-149.883|61.1571|-149.883|
+66|130|F|79258.24|1994-01-20|5-LOW|Clerk#000000743|0|y pending requests integrate|61.2048|-149.834|61.2048|-149.834|
+67|58|O|116227.05|1996-12-19|4-NOT SPECIFIED|Clerk#000000547|0|symptotes haggle slyly around the furiously iron|61.0956|-149.843|61.0956|-149.843|
+68|29|O|215135.72|1998-04-18|3-MEDIUM|Clerk#000000440|0| pinto beans sleep carefully. blithely ironic deposits haggle furiously acro|61.1491|-149.809|61.1491|-149.809|
+69|85|F|162176.23|1994-06-04|4-NOT SPECIFIED|Clerk#000000330|0| depths atop the slyly thin deposits detect among the furiously silent accou|61.1981|-149.871|61.1981|-149.871|
+70|65|F|84651.80|1993-12-18|5-LOW|Clerk#000000322|0| carefully ironic request|61.1982|-149.876|61.1982|-149.876|
+71|4|O|178821.73|1998-01-24|4-NOT SPECIFIED|Clerk#000000271|0| express deposits along the blithely regul|61.1924|-149.909|61.1924|-149.909|
+96|109|F|55090.67|1994-04-17|2-HIGH|Clerk#000000395|0|oost furiously. pinto|61.2204|-149.728|61.2204|-149.728|
+97|22|F|68908.31|1993-01-29|3-MEDIUM|Clerk#000000547|0|hang blithely along the regular accounts. furiously even ideas after the|61.196|-149.864|61.196|-149.864|
+98|106|F|51004.44|1994-09-25|1-URGENT|Clerk#000000448|0|c asymptotes. quickly regular packages should have to nag re|61.1987|-149.889|61.1987|-149.889|
+99|89|F|92326.79|1994-03-13|4-NOT SPECIFIED|Clerk#000000973|0|e carefully ironic packages. pending|61.1984|-149.897|61.1984|-149.897|
+100|148|O|141311.01|1998-02-28|4-NOT SPECIFIED|Clerk#000000577|0|heodolites detect slyly alongside of the ent|61.1897|-149.898|61.1897|-149.898|
+101|28|O|95591.40|1996-03-17|3-MEDIUM|Clerk#000000419|0|ding accounts above the slyly final asymptote|61.1228|-149.81|61.1228|-149.81|
+102|1|O|113954.89|1997-05-09|2-HIGH|Clerk#000000596|0| slyly according to the asymptotes. carefully final packages integrate furious|61.1649|-149.881|61.1649|-149.881|
+103|31|O|95563.95|1996-06-20|4-NOT SPECIFIED|Clerk#000000090|0|ges. carefully unusual instructions haggle quickly regular f|61.1934|-149.887|61.1934|-149.887|
+128|74|F|36333.34|1992-06-15|1-URGENT|Clerk#000000385|0|ns integrate fluffily. ironic asymptotes after the regular excuses nag around |61.2164|-149.892|61.2164|-149.892|
+129|73|F|188124.55|1992-11-19|5-LOW|Clerk#000000859|0|ing tithes. carefully pending deposits boost about the silently express |61.1932|-149.886|61.1932|-149.886|
+130|37|F|115717.37|1992-05-08|2-HIGH|Clerk#000000036|0|le slyly unusual, regular packages? express deposits det|61.2072|-149.888|61.2072|-149.888|
+131|94|F|96596.81|1994-06-08|3-MEDIUM|Clerk#000000625|0|after the fluffily special foxes integrate s|61.2125|-149.904|61.2125|-149.904|
+132|28|F|118802.62|1993-06-11|3-MEDIUM|Clerk#000000488|0|sits are daringly accounts. carefully regular foxes sleep slyly about the|61.2142|-149.806|61.2142|-149.806|
+133|44|O|80437.72|1997-11-29|1-URGENT|Clerk#000000738|0|usly final asymptotes |61.1866|-149.923|61.1866|-149.923|
+134|7|F|154260.84|1992-05-01|4-NOT SPECIFIED|Clerk#000000711|0|lar theodolites boos|61.1089|-149.857|61.1089|-149.857|
+135|61|O|174569.88|1995-10-21|4-NOT SPECIFIED|Clerk#000000804|0|l platelets use according t|61.1024|-149.853|61.1024|-149.853|
+160|83|O|86076.86|1996-12-19|4-NOT SPECIFIED|Clerk#000000342|0|thely special sauternes wake slyly of t|61.1891|-149.906|61.1891|-149.906|
+161|17|F|19056.99|1994-08-31|2-HIGH|Clerk#000000322|0|carefully! special instructions sin|61.1891|-149.906|61.1891|-149.906|
+162|16|O|2158.13|1995-05-08|3-MEDIUM|Clerk#000000378|0|nts hinder fluffily ironic instructions. express, express excuses |61.1891|-149.906|61.1891|-149.906|
+163|88|O|125170.86|1997-09-05|3-MEDIUM|Clerk#000000379|0|y final packages. final foxes since the quickly even|61.1891|-149.906|61.1891|-149.906|
+164|1|F|202660.52|1992-10-21|5-LOW|Clerk#000000209|0|cajole ironic courts. slyly final ideas are slyly. blithely final Tiresias sub|61.1891|-149.906|61.1891|-149.906|
+165|28|F|141824.23|1993-01-30|4-NOT SPECIFIED|Clerk#000000292|0|across the blithely regular accounts. bold|61.1891|-149.906|61.1891|-149.906|
+166|109|O|93335.60|1995-09-12|2-HIGH|Clerk#000000440|0|lets. ironic, bold asymptotes kindle|61.1891|-149.906|61.1891|-149.906|
+167|121|F|52982.23|1993-01-04|4-NOT SPECIFIED|Clerk#000000731|0|s nag furiously bold excuses. fluffily iron|61.1891|-149.906|61.1891|-149.906|
+192|83|O|133002.55|1997-11-25|5-LOW|Clerk#000000483|0|y unusual platelets among the final instructions integrate rut|61.1891|-149.906|61.1891|-149.906|
+193|80|F|48053.18|1993-08-08|1-URGENT|Clerk#000000025|0|the furiously final pin|61.1891|-149.906|61.1891|-149.906|
+194|62|F|114097.63|1992-04-05|3-MEDIUM|Clerk#000000352|0|egular requests haggle slyly regular, regular pinto beans. asymptote|61.1891|-149.906|61.1891|-149.906|
+195|136|F|120053.52|1993-12-28|3-MEDIUM|Clerk#000000216|0|old forges are furiously sheaves. slyly fi|61.1891|-149.906|61.1891|-149.906|
+196|65|F|33248.04|1993-03-17|2-HIGH|Clerk#000000988|0|beans boost at the foxes. silent foxes|61.1891|-149.906|61.1891|-149.906|
+197|34|P|100290.07|1995-04-07|2-HIGH|Clerk#000000969|0|solve quickly about the even braids. carefully express deposits affix care|61.1891|-149.906|61.1891|-149.906|
+198|112|O|125792.83|1998-01-02|4-NOT SPECIFIED|Clerk#000000331|0|its. carefully ironic requests sleep. furiously express fox|61.1891|-149.906|61.1891|-149.906|
+199|53|O|80592.44|1996-03-07|2-HIGH|Clerk#000000489|0|g theodolites. special packag|61.1891|-149.906|61.1891|-149.906|
+224|4|F|155680.60|1994-06-18|4-NOT SPECIFIED|Clerk#000000642|0|r the quickly thin courts. carefully|61.1891|-149.906|61.1891|-149.906|
+225|34|P|165890.47|1995-05-25|1-URGENT|Clerk#000000177|0|s. blithely ironic accounts wake quickly fluffily special acc|61.1891|-149.906|61.1891|-149.906|
+226|128|F|180119.22|1993-03-10|2-HIGH|Clerk#000000756|0|s are carefully at the blithely ironic acc|61.1891|-149.906|61.1891|-149.906|
+227|10|O|46076.46|1995-11-10|5-LOW|Clerk#000000919|0| express instructions. slyly regul|61.1891|-149.906|61.1891|-149.906|
+228|46|F|2638.98|1993-02-25|1-URGENT|Clerk#000000562|0|es was slyly among the regular foxes. blithely regular dependenci|61.1891|-149.906|61.1891|-149.906|
+229|112|F|142290.77|1993-12-29|1-URGENT|Clerk#000000628|0|he fluffily even instructions. furiously i|61.1891|-149.906|61.1891|-149.906|
+230|103|F|107231.60|1993-10-27|1-URGENT|Clerk#000000520|0|odolites. carefully quick requ|61.1891|-149.906|61.1891|-149.906|
+231|91|F|141554.06|1994-09-29|2-HIGH|Clerk#000000446|0| packages haggle slyly after the carefully ironic instruct|61.1891|-149.906|61.1891|-149.906|
+256|125|F|106315.25|1993-10-19|4-NOT SPECIFIED|Clerk#000000834|0|he fluffily final ideas might are final accounts. carefully f|61.1891|-149.906|61.1891|-149.906|
+257|124|O|7102.74|1998-03-28|3-MEDIUM|Clerk#000000680|0|ts against the sly warhorses cajole slyly accounts|61.1891|-149.906|61.1891|-149.906|
+258|43|F|186669.10|1993-12-29|1-URGENT|Clerk#000000167|0|dencies. blithely quick packages cajole. ruthlessly final accounts|61.1891|-149.906|61.1891|-149.906|
+259|44|F|75661.70|1993-09-29|4-NOT SPECIFIED|Clerk#000000601|0|ages doubt blithely against the final foxes. carefully express deposits dazzle|61.1891|-149.906|61.1891|-149.906|
+260|106|O|179292.14|1996-12-10|3-MEDIUM|Clerk#000000960|0|lently regular pinto beans sleep after the slyly e|61.1891|-149.906|61.1891|-149.906|
+261|47|F|201003.12|1993-06-29|3-MEDIUM|Clerk#000000310|0|ully fluffily brave instructions. furiousl|61.1891|-149.906|61.1891|-149.906|
+262|31|O|108443.84|1995-11-25|4-NOT SPECIFIED|Clerk#000000551|0|l packages. blithely final pinto beans use carefu|61.1891|-149.906|61.1891|-149.906|
+263|118|F|79782.56|1994-05-17|2-HIGH|Clerk#000000088|0| pending instructions. blithely un|61.1891|-149.906|61.1891|-149.906|
+288|8|O|163794.53|1997-02-21|1-URGENT|Clerk#000000109|0|uriously final requests. even, final ideas det|61.1891|-149.906|61.1891|-149.906|
+289|104|O|131092.67|1997-02-10|3-MEDIUM|Clerk#000000103|0|sily. slyly special excuse|61.1891|-149.906|61.1891|-149.906|
+290|118|F|62814.89|1994-01-01|4-NOT SPECIFIED|Clerk#000000735|0|efully dogged deposits. furiou|61.1891|-149.906|61.1891|-149.906|
+291|142|F|66817.05|1994-03-13|1-URGENT|Clerk#000000923|0|dolites. carefully regular pinto beans cajol|64.8541|-147.813|64.8541|-147.813|
+292|23|F|30783.05|1992-01-13|2-HIGH|Clerk#000000193|0|g pinto beans will have to sleep f|64.8414|-147.606|64.8414|-147.606|
+293|31|F|37248.78|1992-10-02|2-HIGH|Clerk#000000629|0|re bold, ironic deposits. platelets c|64.8371|-147.746|64.8371|-147.746|
+294|52|F|30059.47|1993-07-16|3-MEDIUM|Clerk#000000499|0|kly according to the frays. final dolphins affix quickly |64.8151|-147.707|64.8151|-147.707|
+295|19|F|89345.99|1994-09-29|2-HIGH|Clerk#000000155|0| unusual pinto beans play. regular ideas haggle|64.8371|-147.746|64.8371|-147.746|
+320|1|O|39835.54|1997-11-21|2-HIGH|Clerk#000000573|0|ar foxes nag blithely|64.849|-147.813|64.849|-147.813|
+321|124|F|62251.15|1993-03-21|3-MEDIUM|Clerk#000000289|0|equests run. blithely final dependencies after the deposits wake caref|64.8425|-147.724|64.8425|-147.724|
+322|134|F|127068.89|1992-03-19|1-URGENT|Clerk#000000158|0|fully across the slyly bold packages. packages against the quickly regular i|64.8425|-147.724|64.8425|-147.724|
+323|40|F|79683.42|1994-03-26|1-URGENT|Clerk#000000959|0|arefully pending foxes sleep blithely. slyly express accoun|64.849|-147.826|64.849|-147.826|
+324|106|F|26868.85|1992-03-20|1-URGENT|Clerk#000000352|0| about the ironic, regular deposits run blithely against the excuses|64.815|-147.882|64.815|-147.882|
+325|41|F|71543.41|1993-10-17|5-LOW|Clerk#000000844|0|ly sometimes pending pa|64.8906|-147.628|64.8906|-147.628|
+326|76|O|229165.17|1995-06-04|2-HIGH|Clerk#000000466|0| requests. furiously ironic asymptotes mold carefully alongside of the blit|64.8276|-147.639|64.8276|-147.639|
+327|145|P|24468.16|1995-04-17|5-LOW|Clerk#000000992|0|ng the slyly final courts. slyly even escapades eat |64.8461|-147.813|64.8461|-147.813|
+352|107|F|16003.86|1994-03-08|2-HIGH|Clerk#000000932|0|ke slyly bold pinto beans. blithely regular accounts against the spe|64.8281|-147.812|64.8281|-147.812|
+353|2|F|179984.42|1993-12-31|5-LOW|Clerk#000000449|0| quiet ideas sleep. even instructions cajole slyly. silently spe|64.8377|-147.718|64.8377|-147.718|
+354|139|O|157062.70|1996-03-14|2-HIGH|Clerk#000000511|0|ly regular ideas wake across the slyly silent ideas. final deposits eat b|64.8417|-147.718|64.8417|-147.718|
+355|71|F|69447.25|1994-06-14|5-LOW|Clerk#000000532|0|s. sometimes regular requests cajole. regular, pending accounts a|64.8145|-147.772|64.8145|-147.772|
+356|148|F|162786.67|1994-06-30|4-NOT SPECIFIED|Clerk#000000944|0|as wake along the bold accounts. even, |64.8541|-147.813|64.8541|-147.813|
+357|61|O|98723.11|1996-10-09|2-HIGH|Clerk#000000301|0|e blithely about the express, final accounts. quickl|64.8169|-147.779|64.8169|-147.779|
+358|4|F|226806.66|1993-09-20|2-HIGH|Clerk#000000392|0|l, silent instructions are slyly. silently even de|64.8378|-147.71|64.8378|-147.71|
+359|79|F|142891.22|1994-12-19|3-MEDIUM|Clerk#000000934|0|n dolphins. special courts above the carefully ironic requests use|64.8436|-147.722|64.8436|-147.722|
+384|115|F|122785.82|1992-03-03|5-LOW|Clerk#000000206|0|, even accounts use furiously packages. slyly ironic pla|64.9401|-147.402|64.9401|-147.402|
+385|34|O|50724.06|1996-03-22|5-LOW|Clerk#000000600|0|hless accounts unwind bold pain|64.8426|-147.719|64.8426|-147.719|
+386|61|F|90380.40|1995-01-25|2-HIGH|Clerk#000000648|0| haggle quickly. stealthily bold asymptotes haggle among the furiously even re|64.8534|-147.811|64.8534|-147.811|
+387|4|O|130647.18|1997-01-26|4-NOT SPECIFIED|Clerk#000000768|0| are carefully among the quickly even deposits. furiously silent req|64.9341|-147.928|64.9341|-147.928|
+388|46|F|120533.46|1992-12-16|4-NOT SPECIFIED|Clerk#000000356|0|ar foxes above the furiously ironic deposits nag slyly final reque|64.8393|-147.72|64.8393|-147.72|
+389|127|F|1984.14|1994-02-17|2-HIGH|Clerk#000000062|0|ing to the regular asymptotes. final, pending foxes about the blithely sil|64.8406|-147.731|64.8406|-147.731|
+390|103|O|168562.27|1998-04-07|5-LOW|Clerk#000000404|0|xpress asymptotes use among the regular, final pinto b|64.9281|-147.865|64.9281|-147.865|
+391|112|F|13282.23|1994-11-17|2-HIGH|Clerk#000000256|0|orges thrash fluffil|64.8371|-147.716|64.8371|-147.716|
+416|41|F|71362.50|1993-09-27|5-LOW|Clerk#000000294|0| the accounts. fluffily bold depo|64.9414|-147.841|64.9414|-147.841|
+417|55|F|91982.29|1994-02-06|3-MEDIUM|Clerk#000000468|0|ironic, even packages. thinly unusual accounts sleep along the slyly unusual |64.8363|-147.79|64.8363|-147.79|
+418|95|P|33124.96|1995-04-13|4-NOT SPECIFIED|Clerk#000000643|0|. furiously ironic instruc|64.8371|-147.716|64.8371|-147.716|
+419|118|O|111597.96|1996-10-01|3-MEDIUM|Clerk#000000376|0|osits. blithely pending theodolites boost carefully|64.8591|-147.917|64.8591|-147.917|
+420|91|O|198039.23|1995-10-31|4-NOT SPECIFIED|Clerk#000000756|0|leep carefully final excuses. fluffily pending requests unwind carefully above|64.8363|-147.79|64.8363|-147.79|
+421|40|F|1084.38|1992-02-22|5-LOW|Clerk#000000405|0|egular, even packages according to the final, un|55.3801|-131.682|55.3801|-131.682|
+422|74|O|106045.89|1997-05-31|4-NOT SPECIFIED|Clerk#000000049|0|aggle carefully across the accounts. regular accounts eat fluffi|55.3073|-131.528|55.3073|-131.528|
+423|104|O|26981.31|1996-06-01|1-URGENT|Clerk#000000674|0|quests. deposits cajole quickly. furiously bold accounts haggle q|55.3801|-131.682|55.3801|-131.682|
+448|149|O|114978.03|1995-08-21|3-MEDIUM|Clerk#000000597|0| regular, express foxes use blithely. quic|55.3601|-131.681|55.3601|-131.681|
+449|97|O|41605.63|1995-07-20|2-HIGH|Clerk#000000841|0|. furiously regular theodolites affix blithely |55.3279|-131.613|55.3279|-131.613|
+450|49|P|153386.61|1995-03-05|4-NOT SPECIFIED|Clerk#000000293|0|d theodolites. boldly bold foxes since the pack|55.3129|-131.588|55.3129|-131.588|
+451|100|O|104664.40|1998-05-25|5-LOW|Clerk#000000048|0|nic pinto beans. theodolites poach carefully; |55.3801|-131.682|55.3801|-131.682|
+452|61|O|2007.48|1997-10-14|1-URGENT|Clerk#000000498|0|t, unusual instructions above the blithely bold pint|55.3801|-131.682|55.3801|-131.682|
+453|46|O|216826.73|1997-05-26|5-LOW|Clerk#000000504|0|ss foxes. furiously regular ideas sleep according to t|55.4299|-131.789|55.4299|-131.789|
+454|49|O|23198.24|1995-12-27|5-LOW|Clerk#000000890|0|dolites sleep carefully blithely regular deposits. quickly regul|55.3801|-131.682|55.3801|-131.682|
+455|13|O|138010.76|1996-12-04|1-URGENT|Clerk#000000796|0| about the final platelets. dependen|55.3507|-131.671|55.3507|-131.671|
+480|73|F|20530.97|1993-05-08|5-LOW|Clerk#000000004|0|ealthy pinto beans. fluffily regular requests along the special sheaves wake |55.3801|-131.682|55.3801|-131.682|
+481|31|F|117827.18|1992-10-08|2-HIGH|Clerk#000000230|0|ly final ideas. packages haggle fluffily|55.3394|-131.636|55.3394|-131.636|
+482|127|O|136634.34|1996-03-26|1-URGENT|Clerk#000000295|0|ts. deposits wake: final acco|55.3801|-131.682|55.3801|-131.682|
+483|35|O|39793.05|1995-07-11|2-HIGH|Clerk#000000025|0|cross the carefully final e|55.3103|-131.582|55.3103|-131.582|
+484|55|O|219920.62|1997-01-03|3-MEDIUM|Clerk#000000545|0|grouches use. furiously bold accounts maintain. bold, regular deposits|55.3801|-131.682|55.3801|-131.682|
+485|101|O|110432.76|1997-03-26|2-HIGH|Clerk#000000105|0| regular ideas nag thinly furiously s|55.3801|-131.682|55.3801|-131.682|
+486|52|O|185968.15|1996-03-11|4-NOT SPECIFIED|Clerk#000000803|0|riously dolphins. fluffily ironic requ|55.3801|-131.682|55.3801|-131.682|
+487|109|F|48502.79|1992-08-18|1-URGENT|Clerk#000000086|0|ithely unusual courts eat accordi|55.3801|-131.682|55.3801|-131.682|
+512|64|P|124661.48|1995-05-20|5-LOW|Clerk#000000814|0|ding requests. carefully express theodolites was quickly. furious|55.3801|-131.682|55.3801|-131.682|
+513|61|O|63703.92|1995-05-01|2-HIGH|Clerk#000000522|0|regular packages. pinto beans cajole carefully against the even|55.3424|-131.634|55.3424|-131.634|
+514|76|O|104585.77|1996-04-04|2-HIGH|Clerk#000000094|0| cajole furiously. slyly final excuses cajole. slyly special instructions |55.4097|-131.729|55.4097|-131.729|
+515|142|F|153720.22|1993-08-29|4-NOT SPECIFIED|Clerk#000000700|0|eposits are furiously furiously silent pinto beans. pending pack|55.3801|-131.682|55.3801|-131.682|
+516|44|O|10677.86|1998-04-21|2-HIGH|Clerk#000000305|0|lar, unusual platelets are carefully. even courts sleep bold, final pinto bea|55.3801|-131.682|55.3801|-131.682|
+517|10|O|82197.79|1997-04-07|5-LOW|Clerk#000000359|0|slyly pending deposits cajole quickly packages. furiou|55.3462|-131.658|55.3462|-131.658|
+518|145|O|223537.09|1998-02-08|2-HIGH|Clerk#000000768|0| the carefully bold accounts. quickly regular excuses are|55.3801|-131.682|55.3801|-131.682|
+519|64|O|95731.50|1997-10-31|1-URGENT|Clerk#000000985|0|ains doze furiously against the f|55.3801|-131.682|55.3801|-131.682|
+544|94|F|47627.89|1993-02-17|2-HIGH|Clerk#000000145|0|the special, final accounts. dogged dolphins|55.3801|-131.682|55.3801|-131.682|
+545|64|O|23476.12|1995-11-07|2-HIGH|Clerk#000000537|0|as. blithely final hockey players about th|55.3801|-131.682|55.3801|-131.682|
+546|145|O|14790.37|1996-11-01|2-HIGH|Clerk#000000041|0|osits sleep. slyly special dolphins about the q|55.3801|-131.682|55.3801|-131.682|
+547|100|O|96855.29|1996-06-22|3-MEDIUM|Clerk#000000976|0|ing accounts eat. carefully regular packa|55.3801|-131.682|55.3801|-131.682|
+548|124|F|99088.75|1994-09-21|1-URGENT|Clerk#000000435|0|arefully express instru|55.3801|-131.682|55.3801|-131.682|
+549|110|F|141679.41|1992-07-13|1-URGENT|Clerk#000000196|0|ideas alongside of |55.3801|-131.682|55.3801|-131.682|
+550|25|O|33123.28|1995-08-02|1-URGENT|Clerk#000000204|0|t requests. blithely |61.5856|-149.316|61.5856|-149.316|
+551|91|O|46355.83|1995-05-30|1-URGENT|Clerk#000000179|0|xpress accounts boost quic|61.5781|-149.429|61.5781|-149.429|
+576|31|O|18307.45|1997-05-13|3-MEDIUM|Clerk#000000955|0|l requests affix regular requests. final account|61.6141|-149.457|61.6141|-149.457|
+577|56|F|34768.68|1994-12-19|5-LOW|Clerk#000000154|0| deposits engage stealthil|61.5801|-149.461|61.5801|-149.461|
+578|94|O|70392.02|1997-01-10|5-LOW|Clerk#000000281|0|e blithely even packages. slyly pending platelets bes|61.9071|-150.067|61.9071|-150.067|
+579|68|O|120828.12|1998-03-11|2-HIGH|Clerk#000000862|0| regular instructions. blithely even p|61.5928|-149.392|61.5928|-149.392|
+580|61|O|88219.12|1997-07-05|2-HIGH|Clerk#000000314|0|tegrate fluffily regular accou|61.6141|-149.457|61.6141|-149.457|
+581|70|O|126066.00|1997-02-23|4-NOT SPECIFIED|Clerk#000000239|0| requests. even requests use slyly. blithely ironic |61.5792|-149.36|61.5792|-149.36|
+582|50|O|129004.81|1997-10-21|1-URGENT|Clerk#000000378|0|n pinto beans print a|61.6049|-149.463|61.6049|-149.463|
+583|49|O|127817.38|1997-03-19|3-MEDIUM|Clerk#000000792|0|efully express requests. a|61.6099|-149.328|61.6099|-149.328|
+608|26|O|62567.99|1996-02-28|3-MEDIUM|Clerk#000000995|0|nic waters wake slyly slyly expre|61.5531|-149.651|61.5531|-149.651|
+609|127|F|21088.59|1994-06-01|3-MEDIUM|Clerk#000000348|0|- ironic gifts believe furiously ca|61.6141|-149.457|61.6141|-149.457|
+610|52|O|175142.28|1995-08-02|1-URGENT|Clerk#000000610|0|totes. ironic, unusual packag|61.6141|-149.457|61.6141|-149.457|
+611|106|F|73907.63|1993-01-27|1-URGENT|Clerk#000000401|0|ounts detect furiously ac|61.5531|-149.651|61.5531|-149.651|
+612|82|F|145695.42|1992-10-21|3-MEDIUM|Clerk#000000759|0|boost quickly quickly final excuses. final foxes use bravely afte|61.6141|-149.457|61.6141|-149.457|
+613|139|O|33396.35|1995-06-18|2-HIGH|Clerk#000000172|0|ts hinder among the deposits. fluffily ironic depos|61.7321|-150.12|61.7321|-150.12|
+614|134|F|218116.21|1992-12-01|2-HIGH|Clerk#000000388|0| deposits! even, daring theodol|61.6141|-149.457|61.6141|-149.457|
+615|67|F|32890.89|1992-05-09|5-LOW|Clerk#000000388|0|t to promise asymptotes. packages haggle alongside of the fluffil|61.582|-149.441|61.582|-149.441|
+640|97|F|145495.62|1993-01-23|2-HIGH|Clerk#000000433|0|r, unusual accounts boost carefully final ideas. slyly silent theod|61.5818|-149.44|61.5818|-149.44|
+641|133|F|120626.49|1993-08-30|5-LOW|Clerk#000000175|0|ents cajole furiously about the quickly silent pac|61.6141|-149.457|61.6141|-149.457|
+642|40|F|22994.51|1993-12-16|3-MEDIUM|Clerk#000000357|0| among the requests wake slyly alongside of th|61.7321|-150.12|61.7321|-150.12|
+643|58|P|180396.95|1995-03-25|2-HIGH|Clerk#000000354|0|g dependencies. regular accounts |61.6308|-149.415|61.6308|-149.415|
+644|8|F|201268.06|1992-05-01|1-URGENT|Clerk#000000550|0| blithely unusual platelets haggle ironic, special excuses. excuses unwi|61.5801|-149.461|61.5801|-149.461|
+645|115|F|234763.73|1994-12-03|2-HIGH|Clerk#000000090|0|quickly daring theodolites across the regu|61.5811|-149.444|61.5811|-149.444|
+646|52|F|142070.65|1994-11-22|2-HIGH|Clerk#000000203|0|carefully even foxes. fina|61.6521|-149.92|61.6521|-149.92|
+647|143|O|56449.23|1997-08-07|1-URGENT|Clerk#000000270|0|egular pearls. carefully express asymptotes are. even account|61.6141|-149.307|61.6141|-149.307|
+672|109|F|89877.09|1994-04-14|5-LOW|Clerk#000000106|0|egular requests are furiously according to |61.6168|-149.328|61.6168|-149.328|
+673|80|F|21137.08|1994-03-10|1-URGENT|Clerk#000000448|0| special pinto beans use quickly furiously even depende|61.5714|-149.381|61.5714|-149.381|
+674|34|F|27204.60|1992-08-29|5-LOW|Clerk#000000448|0|ully special deposits. furiously final warhorses affix carefully. fluffily f|61.6521|-149.92|61.6521|-149.92|
+675|13|O|125188.72|1997-07-31|2-HIGH|Clerk#000000168|0|ffily between the careful|61.5858|-149.376|61.5858|-149.376|
+676|38|O|163966.67|1996-12-13|2-HIGH|Clerk#000000248|0|the final deposits. special, pending|61.5822|-149.463|61.5822|-149.463|
+677|124|F|147915.68|1993-11-24|3-MEDIUM|Clerk#000000824|0|uriously special pinto beans cajole carefully. fi|61.5861|-149.303|61.5861|-149.303|
+678|131|F|135761.05|1993-02-27|5-LOW|Clerk#000000530|0|. blithely final somas about the|61.5821|-149.438|61.5821|-149.438|
+679|49|O|8945.03|1995-12-15|2-HIGH|Clerk#000000853|0|tealthy, final pinto beans haggle slyly. pending platelets about the special, |61.6281|-149.338|61.6281|-149.338|
+704|85|O|56210.26|1996-11-21|3-MEDIUM|Clerk#000000682|0|blithely pending platelets wake alongside of the final, iron|61.5771|-149.335|61.5771|-149.335|
+705|43|O|83773.49|1997-02-13|4-NOT SPECIFIED|Clerk#000000294|0|ithely regular dependencies. express, even packages sleep slyly pending t|61.5917|-149.464|61.5917|-149.464|
+706|148|O|23973.60|1995-09-09|1-URGENT|Clerk#000000448|0|g the packages. deposits caj|61.1927|-149.86|61.1927|-149.86|
+707|118|F|58218.35|1994-11-20|3-MEDIUM|Clerk#000000199|0| ideas about the silent, bold deposits nag dolphins|61.1879|-149.85|61.1879|-149.85|
+708|32|O|100445.59|1998-07-03|3-MEDIUM|Clerk#000000101|0|lphins cajole about t|61.1814|-149.849|61.1814|-149.849|
+709|37|O|72055.87|1998-04-21|1-URGENT|Clerk#000000461|0|ons alongside of the carefully bold pinto bea|61.2104|-149.892|61.2104|-149.892|
+710|133|F|208974.42|1993-01-02|5-LOW|Clerk#000000026|0| regular, regular requests boost. fluffily re|61.2093|-149.903|61.2093|-149.903|
+711|64|F|92484.70|1993-09-23|4-NOT SPECIFIED|Clerk#000000856|0|its. fluffily regular gifts are furi|61.1481|-149.829|61.1481|-149.829|
+736|47|O|130204.17|1998-06-21|5-LOW|Clerk#000000881|0|refully of the final pi|61.2161|-149.876|61.2161|-149.876|
+737|121|F|12984.85|1992-04-26|5-LOW|Clerk#000000233|0|ake blithely express, ironic theodolites. blithely special accounts wa|61.1972|-149.75|61.1972|-149.75|
+738|22|F|114145.18|1993-03-02|4-NOT SPECIFIED|Clerk#000000669|0|ly even foxes. furiously regular accounts cajole ca|61.2066|-149.887|61.2066|-149.887|
+739|1|O|159171.69|1998-05-31|5-LOW|Clerk#000000900|0| against the slyly ironic packages nag slyly ironic|61.2161|-149.876|61.2161|-149.876|
+740|44|O|83490.99|1995-07-16|3-MEDIUM|Clerk#000000583|0|courts haggle furiously across the final, regul|61.195|-149.834|61.195|-149.834|
+741|106|O|47985.98|1998-07-07|2-HIGH|Clerk#000000295|0|ic instructions. slyly express instructions solv|61.2038|-149.808|61.2038|-149.808|
+742|103|F|207632.55|1994-12-23|5-LOW|Clerk#000000543|0|equests? slyly ironic dolphins boost carefully above the blithely|61.1228|-149.862|61.1228|-149.862|
+743|79|O|23614.89|1996-10-04|4-NOT SPECIFIED|Clerk#000000933|0|eans. furiously ironic deposits sleep carefully carefully qui|61.2005|-149.785|61.2005|-149.785|
+768|98|O|220636.82|1996-08-20|3-MEDIUM|Clerk#000000411|0|jole slyly ironic packages. slyly even idea|61.181|-149.825|61.181|-149.825|
+769|80|F|43092.76|1993-06-02|3-MEDIUM|Clerk#000000172|0|ggle furiously. ironic packages haggle slyly. bold platelets affix s|61.1867|-149.919|61.1867|-149.919|
+770|32|O|64271.75|1998-05-23|5-LOW|Clerk#000000572|0|heodolites. furiously special pinto beans cajole pac|61.1955|-149.911|61.1955|-149.911|
+771|46|O|105302.05|1995-06-17|1-URGENT|Clerk#000000105|0|s. furiously final instructions across the deposit|61.1089|-149.858|61.1089|-149.858|
+772|97|F|128234.96|1993-04-17|2-HIGH|Clerk#000000430|0|s boost blithely fluffily idle ideas? fluffily even pin|61.1805|-149.889|61.1805|-149.889|
+773|133|F|146862.27|1993-09-26|3-MEDIUM|Clerk#000000307|0|tions are quickly accounts. accounts use bold, even pinto beans. gifts ag|61.1534|-149.985|61.1534|-149.985|
+774|80|O|145857.60|1995-12-04|1-URGENT|Clerk#000000883|0|tealthily even depths|61.1901|-149.911|61.1901|-149.911|
+775|134|F|59455.61|1995-03-18|1-URGENT|Clerk#000000191|0|kly express requests. fluffily silent accounts poach furiously|61.2122|-149.734|61.2122|-149.734|
+800|56|O|87892.38|1998-07-14|2-HIGH|Clerk#000000213|0|y alongside of the pending packages? final platelets nag fluffily carefu|61.1951|-149.906|61.1951|-149.906|
+801|118|F|127717.72|1992-02-18|1-URGENT|Clerk#000000186|0|iously from the furiously enticing reques|61.2043|-149.869|61.2043|-149.869|
+802|137|F|156381.95|1995-01-05|1-URGENT|Clerk#000000516|0|posits. ironic, pending requests cajole. even theodol|61.2036|-149.869|61.2036|-149.869|
+803|16|O|27629.66|1997-04-29|5-LOW|Clerk#000000260|0|ic instructions. even deposits haggle furiously at the deposits-- regular de|61.1883|-149.886|61.1883|-149.886|
+804|50|F|94400.43|1993-03-12|3-MEDIUM|Clerk#000000931|0|s. blithely final foxes are about the packag|61.2141|-149.864|61.2141|-149.864|
+805|127|O|90042.41|1995-07-05|4-NOT SPECIFIED|Clerk#000000856|0|y according to the fluffily |61.1955|-149.782|61.1955|-149.782|
+806|131|O|26839.16|1996-06-20|2-HIGH|Clerk#000000240|0| the ironic packages wake carefully fina|61.2183|-149.894|61.2183|-149.894|
+807|145|F|222392.53|1993-11-24|3-MEDIUM|Clerk#000000012|0|refully special tithes. blithely regular accoun|61.1417|-149.864|61.1417|-149.864|
+832|29|F|68494.08|1992-04-19|5-LOW|Clerk#000000495|0|xes. bravely regular packages sleep up the furiously bold accou|61.1883|-149.883|61.1883|-149.883|
+833|56|F|49033.69|1994-02-13|3-MEDIUM|Clerk#000000437|0|ts haggle quickly across the slyl|61.2161|-149.876|61.2161|-149.876|
+834|43|F|46459.92|1994-05-23|3-MEDIUM|Clerk#000000805|0| sleep. quickly even foxes are boldly. slyly express requests use slyly|61.2193|-149.869|61.2193|-149.869|
+835|65|O|62430.67|1995-10-08|4-NOT SPECIFIED|Clerk#000000416|0|s about the carefully special foxes haggle quickly about the|61.2191|-149.888|61.2191|-149.888|
+836|70|O|72843.48|1996-11-25|4-NOT SPECIFIED|Clerk#000000729|0|ely bold excuses sleep regular ideas. furiously unusual ideas wake furiou|61.2191|-149.888|61.2191|-149.888|
+837|116|F|60918.41|1994-06-15|4-NOT SPECIFIED|Clerk#000000563|0|kages sleep slyly above the ironic, final orbits|61.2191|-149.888|61.2191|-149.888|
+838|17|O|82918.36|1998-01-29|5-LOW|Clerk#000000213|0| slyly around the slyly even|61.2191|-149.888|61.2191|-149.888|
+839|28|O|70182.63|1995-08-08|1-URGENT|Clerk#000000951|0|the carefully even platelets. furiously unusual fo|61.2191|-149.888|61.2191|-149.888|
+864|139|O|74710.74|1997-08-17|1-URGENT|Clerk#000000036|0|ly after the slyly regular deposits. express, regular asymptotes nag ca|61.2191|-149.888|61.2191|-149.888|
+865|4|F|70430.54|1993-05-04|3-MEDIUM|Clerk#000000337|0|. special packages wake after the carefully final accounts. express pinto be|61.2191|-149.888|61.2191|-149.888|
+866|40|F|4766.19|1992-11-28|3-MEDIUM|Clerk#000000718|0|ins after the even, even accounts nod blithel|61.2191|-149.888|61.2191|-149.888|
+867|26|F|7471.75|1993-11-16|3-MEDIUM|Clerk#000000877|0|pades nag quickly final, |61.2191|-149.888|61.2191|-149.888|
+868|104|F|127345.45|1992-06-09|4-NOT SPECIFIED|Clerk#000000782|0|onic theodolites print carefully. blithely dogge|61.2191|-149.888|61.2191|-149.888|
+869|136|O|58932.19|1997-01-12|2-HIGH|Clerk#000000245|0|ar sheaves are slowly. slyly even attainments boost theodolites. furiously|61.2191|-149.888|61.2191|-149.888|
+870|34|F|40492.37|1993-06-20|4-NOT SPECIFIED|Clerk#000000123|0|blithely ironic ideas nod. sly, r|61.2191|-149.888|61.2191|-149.888|
+871|16|O|172861.58|1995-11-15|5-LOW|Clerk#000000882|0|oss the ironic theodolites.|61.1891|-149.906|61.1891|-149.906|
+896|2|F|169847.63|1993-03-09|1-URGENT|Clerk#000000187|0|inal packages eat blithely according to the warhorses. furiously quiet de|61.2191|-149.888|61.2191|-149.888|
+897|49|P|57697.44|1995-03-20|1-URGENT|Clerk#000000316|0| wake quickly against |61.2191|-149.888|61.2191|-149.888|
+898|55|F|101020.75|1993-06-03|2-HIGH|Clerk#000000611|0|. unusual pinto beans haggle quickly across |61.1101|-149.857|61.1101|-149.857|
+899|109|O|125562.09|1998-04-08|5-LOW|Clerk#000000575|0|rts engage carefully final theodolites.|61.1101|-149.857|61.1101|-149.857|
+900|46|F|120073.51|1994-10-01|4-NOT SPECIFIED|Clerk#000000060|0| fluffily express deposits nag furiousl|61.1101|-149.857|61.1101|-149.857|
+901|13|O|81826.12|1998-07-21|4-NOT SPECIFIED|Clerk#000000929|0|lyly even foxes are furious, silent requests. requests about the quickly |61.1101|-149.857|61.1101|-149.857|
+902|10|F|37348.62|1994-07-27|4-NOT SPECIFIED|Clerk#000000811|0|yly final requests over the furiously regula|61.1101|-149.857|61.1101|-149.857|
+903|11|O|109351.87|1995-07-07|4-NOT SPECIFIED|Clerk#000000793|0|e slyly about the final pl|61.1101|-149.857|61.1101|-149.857|
+928|67|F|228136.49|1995-03-02|5-LOW|Clerk#000000450|0|ithely express pinto beans. |61.1101|-149.857|61.1101|-149.857|
+929|83|F|109301.02|1992-10-02|2-HIGH|Clerk#000000160|0|its. furiously even foxes affix carefully finally silent accounts. express req|61.1101|-149.857|61.1101|-149.857|
+930|131|F|199102.23|1994-12-17|1-URGENT|Clerk#000000004|0| accounts nag slyly. ironic, ironic accounts wake blithel|61.1101|-149.857|61.1101|-149.857|
+931|103|F|117909.23|1992-12-07|1-URGENT|Clerk#000000881|0|ss packages haggle furiously express, regular deposits. even, e|61.1101|-149.857|61.1101|-149.857|
+932|41|O|40234.50|1997-05-16|2-HIGH|Clerk#000000218|0|ly express instructions boost furiously reg|61.1101|-149.857|61.1101|-149.857|
+933|97|F|71349.30|1992-08-05|4-NOT SPECIFIED|Clerk#000000752|0|ial courts wake permanently against the furiously regular ideas. unusual |61.1101|-149.857|61.1101|-149.857|
+934|52|O|17213.59|1996-07-03|1-URGENT|Clerk#000000229|0|ts integrate carefully. sly, regular deposits af|61.1101|-149.857|61.1101|-149.857|
+935|50|O|97733.87|1997-09-24|5-LOW|Clerk#000000180|0|iously final deposits cajole. blithely even packages |61.1101|-149.857|61.1101|-149.857|
+960|35|F|63537.13|1994-09-21|3-MEDIUM|Clerk#000000120|0|regular accounts. requests|61.1101|-149.857|61.1101|-149.857|
+961|56|P|158893.16|1995-06-04|4-NOT SPECIFIED|Clerk#000000720|0|ons nag furiously among the quickl|61.1101|-149.857|61.1101|-149.857|
+962|37|F|98258.73|1994-05-06|5-LOW|Clerk#000000463|0|ments nag deposits. fluffily ironic a|61.1101|-149.857|61.1101|-149.857|
+963|26|F|53287.25|1994-05-26|3-MEDIUM|Clerk#000000497|0|uses haggle carefully. slyly even dependencies after the packages ha|61.1101|-149.857|61.1101|-149.857|
+964|76|O|131146.47|1995-05-20|3-MEDIUM|Clerk#000000657|0|print blithely ironic, careful theodolit|61.1101|-149.857|61.1101|-149.857|
+965|70|P|41758.44|1995-05-15|5-LOW|Clerk#000000218|0|iously special packages. slyly pending requests are carefully |64.8591|-147.917|64.8591|-147.917|
+966|14|O|120516.93|1998-04-30|2-HIGH|Clerk#000000239|0|special deposits. furious|64.8273|-147.715|64.8273|-147.715|
+967|110|F|179287.95|1992-06-21|3-MEDIUM|Clerk#000000167|0|excuses engage quickly bold dep|64.8281|-147.715|64.8281|-147.715|
+992|55|O|133665.12|1997-11-11|3-MEDIUM|Clerk#000000875|0|ts. regular pinto beans thrash carefully sl|64.8552|-147.763|64.8552|-147.763|
+993|80|O|198238.65|1995-09-10|3-MEDIUM|Clerk#000000894|0|quickly express accounts among the furiously bol|64.8481|-147.684|64.8481|-147.684|
+994|2|F|41433.48|1994-04-20|5-LOW|Clerk#000000497|0|ole. slyly bold excuses nag caref|64.8522|-147.773|64.8522|-147.773|
+995|116|P|135157.92|1995-05-31|3-MEDIUM|Clerk#000000439|0|deas. blithely final deposits play. express accounts wake blithely caref|64.8467|-147.703|64.8467|-147.703|
+996|71|O|47447.63|1997-12-29|1-URGENT|Clerk#000000497|0|arefully final packages into the slyly final requests affix blit|64.8963|-147.662|64.8963|-147.662|
+997|109|O|27561.82|1997-05-19|2-HIGH|Clerk#000000651|0|ly express depths. furiously final requests haggle furiously. carefu|64.8372|-147.796|64.8372|-147.796|
+998|32|F|65269.38|1994-11-26|4-NOT SPECIFIED|Clerk#000000956|0|ronic dolphins. ironic, bold ideas haggle furiously furious|64.8312|-147.716|64.8312|-147.716|
+999|61|F|145249.13|1993-09-05|5-LOW|Clerk#000000464|0|pitaphs sleep. regular accounts use. f|64.811|-147.71|64.811|-147.71|
+1024|4|O|176084.63|1997-12-23|5-LOW|Clerk#000000903|0| blithely. even, express theodolites cajole slyly across|64.8971|-147.663|64.8971|-147.663|
+1025|103|F|82034.03|1995-05-05|2-HIGH|Clerk#000000376|0|ross the slyly final pa|64.85|-147.699|64.85|-147.699|
+1026|73|O|36464.76|1997-06-04|5-LOW|Clerk#000000223|0|s wake blithely. special acco|64.8389|-147.743|64.8389|-147.743|
+1027|128|F|112770.89|1992-06-03|3-MEDIUM|Clerk#000000241|0|equests cajole. slyly final pinto bean|64.781|-148|64.781|-148|
+1028|70|F|153864.67|1994-01-01|2-HIGH|Clerk#000000131|0|ts are. final, silent deposits are among the fl|64.8377|-147.718|64.8377|-147.718|
+1029|130|F|47440.91|1994-06-21|2-HIGH|Clerk#000000700|0|quests sleep. slyly even foxes wake quickly final theodolites. clo|64.8248|-147.886|64.8248|-147.886|
+1030|134|F|16346.94|1994-06-15|5-LOW|Clerk#000000422|0|ully ironic accounts sleep carefully. requests are carefully alongside of the |64.818|-147.679|64.818|-147.679|
+1031|4|F|128024.71|1994-09-01|3-MEDIUM|Clerk#000000448|0|s; ironic theodolites along the carefully ex|64.8271|-147.79|64.8271|-147.79|
+1056|28|F|38446.39|1995-02-11|1-URGENT|Clerk#000000125|0|t, even deposits hang about the slyly special i|64.8451|-147.812|64.8451|-147.812|
+1057|76|F|108107.42|1992-02-20|1-URGENT|Clerk#000000124|0|cuses dazzle carefully careful, ironic pinto beans. carefully even theod|64.8311|-147.729|64.8311|-147.729|
+1058|53|F|89359.11|1993-04-26|3-MEDIUM|Clerk#000000373|0|kly pending courts haggle. blithely regular sheaves integrate carefully fi|64.8454|-147.855|64.8454|-147.855|
+1059|127|F|198360.22|1994-02-27|1-URGENT|Clerk#000000104|0|en accounts. carefully bold packages cajole daringly special depende|64.8302|-147.744|64.8302|-147.744|
+1060|140|F|121994.04|1993-02-21|3-MEDIUM|Clerk#000000989|0|l platelets sleep quickly slyly special requests. furiously |64.8113|-147.91|64.8113|-147.91|
+1061|103|O|166947.75|1998-05-15|5-LOW|Clerk#000000576|0|uests sleep at the packages. fur|64.8271|-147.789|64.8271|-147.789|
+1062|106|O|39805.04|1997-01-15|1-URGENT|Clerk#000000152|0|eposits use blithely |64.8451|-147.698|64.8451|-147.698|
+1063|37|F|41392.31|1994-04-02|2-HIGH|Clerk#000000024|0|deposits nag quickly regular deposits. quickl|64.8586|-147.69|64.8586|-147.69|
+1088|148|F|47120.41|1992-05-21|5-LOW|Clerk#000000347|0|counts are blithely. platelets print. carefully |64.8507|-147.702|64.8507|-147.702|
+1089|49|O|103192.74|1996-05-04|4-NOT SPECIFIED|Clerk#000000226|0|ns haggle ruthlessly. even requests are quickly abov|64.8371|-147.716|64.8371|-147.716|
+1090|19|O|32929.30|1997-11-15|2-HIGH|Clerk#000000300|0| furiously regular platelets haggle along the slyly unusual foxes! |64.8449|-147.743|64.8449|-147.743|
+1091|83|O|35795.22|1996-08-27|1-URGENT|Clerk#000000549|0| even pinto beans haggle quickly alongside of the eve|64.8475|-147.706|64.8475|-147.706|
+1092|124|P|85552.21|1995-03-04|3-MEDIUM|Clerk#000000006|0|re quickly along the blithe|64.8452|-147.714|64.8452|-147.714|
+1093|101|O|79189.58|1997-07-31|4-NOT SPECIFIED|Clerk#000000159|0| after the carefully ironic requests. carefully ironic packages wake fluffil|64.8125|-147.787|64.8125|-147.787|
+1094|145|O|9006.25|1997-12-24|3-MEDIUM|Clerk#000000570|0|beans affix furiously about the pending, even deposits. finally pendi|55.3801|-131.682|55.3801|-131.682|
+1095|145|O|178491.24|1995-08-22|3-MEDIUM|Clerk#000000709|0|sly bold requests cajole carefully according to|55.3801|-131.682|55.3801|-131.682|
+1120|140|O|107958.62|1997-11-07|3-MEDIUM|Clerk#000000319|0|lly special requests. slyly pending platelets are quickly pending requ|55.3801|-131.682|55.3801|-131.682|
+1121|29|O|241837.88|1997-01-13|3-MEDIUM|Clerk#000000541|0|r escapades. deposits above the fluffily bold requests hag|55.3801|-131.682|55.3801|-131.682|
+1122|121|O|179747.47|1997-01-10|1-URGENT|Clerk#000000083|0|uffily carefully final theodolites. furiously express packages affix|55.3801|-131.682|55.3801|-131.682|
+1123|73|O|93259.93|1996-08-03|3-MEDIUM|Clerk#000000929|0|uriously pending requests. slyly regular instruction|55.3801|-131.682|55.3801|-131.682|
+1124|80|O|141858.97|1998-07-30|5-LOW|Clerk#000000326|0|regular pinto beans along the fluffily silent packages|55.3599|-131.687|55.3599|-131.687|
+1125|25|F|80438.38|1994-10-27|2-HIGH|Clerk#000000510|0|ithely final requests. i|55.4381|-131.803|55.4381|-131.803|
+1126|145|O|59982.31|1998-01-28|4-NOT SPECIFIED|Clerk#000000928|0|d slyly regular ideas: special ideas believe slyly. slyly ironic sheaves w|55.3751|-131.718|55.3751|-131.718|
+1127|58|O|103320.91|1995-09-19|4-NOT SPECIFIED|Clerk#000000397|0|usly silent, regular pinto beans. blithely express requests boos|55.3421|-131.641|55.3421|-131.641|
+1152|49|F|51775.54|1994-08-14|4-NOT SPECIFIED|Clerk#000000496|0|equests. deposits ab|55.3408|-131.64|55.3408|-131.64|
+1153|121|O|220727.97|1996-04-18|5-LOW|Clerk#000000059|0| across the pending deposi|55.2978|-131.534|55.2978|-131.534|
+1154|37|F|192417.85|1992-02-15|1-URGENT|Clerk#000000268|0|old asymptotes are special requests. blithely even deposits sleep furiously|55.3801|-131.682|55.3801|-131.682|
+1155|149|O|126902.81|1997-10-06|2-HIGH|Clerk#000000164|0|c deposits haggle among the ironic, even requests. carefully ironic sheaves n|55.3801|-131.682|55.3801|-131.682|
+1156|133|O|217682.81|1996-10-19|1-URGENT|Clerk#000000200|0| blithely ironic dolphins. furiously pendi|55.3421|-131.622|55.3421|-131.622|
+1157|97|O|85394.06|1998-01-14|4-NOT SPECIFIED|Clerk#000000207|0|out the regular excuses boost carefully against the furio|55.3801|-131.682|55.3801|-131.682|
+1158|142|O|31075.51|1996-06-30|2-HIGH|Clerk#000000549|0|integrate slyly furiously ironic deposit|55.3801|-131.682|55.3801|-131.682|
+1159|70|F|55553.68|1992-09-18|3-MEDIUM|Clerk#000000992|0|ts may sleep. requests according to the|55.3801|-131.682|55.3801|-131.682|
+1184|89|O|39700.29|1997-10-26|5-LOW|Clerk#000000777|0|iously even packages haggle fluffily care|55.3267|-131.523|55.3267|-131.523|
+1185|74|F|47033.21|1992-08-24|5-LOW|Clerk#000000344|0| even escapades are. package|55.3522|-131.685|55.3522|-131.685|
+1186|59|O|82026.18|1996-08-15|4-NOT SPECIFIED|Clerk#000000798|0|ingly regular pinto beans: instructi|55.5351|-133.014|55.5351|-133.014|
+1187|134|F|85948.02|1992-11-20|3-MEDIUM|Clerk#000000047|0|s after the furiously final deposits boost slyly under the|55.5351|-133.014|55.5351|-133.014|
+1188|20|O|54655.07|1996-04-11|2-HIGH|Clerk#000000256|0|ully ironic deposits. slyl|55.5351|-133.014|55.5351|-133.014|
+1189|46|F|71017.99|1994-04-09|1-URGENT|Clerk#000000243|0|f the even accounts. courts print blithely ironic accounts. sile|55.5351|-133.014|55.5351|-133.014|
+1190|13|O|31043.39|1997-03-16|5-LOW|Clerk#000000575|0|ccounts above the foxes integrate carefully after the |55.5351|-133.014|55.5351|-133.014|
+1191|112|O|28623.04|1995-11-07|3-MEDIUM|Clerk#000000011|0|uests nag furiously. carefully even requests|55.4691|-132.855|55.4691|-132.855|
+1216|122|F|68056.57|1992-12-07|5-LOW|Clerk#000000918|0|nal foxes around the e|55.5511|-133.081|55.5511|-133.081|
+1217|7|F|40982.08|1992-04-26|4-NOT SPECIFIED|Clerk#000000538|0| foxes nag quickly. ironic excuses nod. blithely pending|55.5351|-133.014|55.5351|-133.014|
+1218|10|F|99834.47|1994-06-20|4-NOT SPECIFIED|Clerk#000000994|0|s cajole. special, silent deposits about the theo|55.5531|-133.097|55.5531|-133.097|
+1219|28|O|10163.56|1995-10-05|3-MEDIUM|Clerk#000000800|0|od carefully. slyly final dependencies across the even fray|55.5351|-133.014|55.5351|-133.014|
+1220|49|O|122157.14|1996-08-29|1-URGENT|Clerk#000000712|0|inal theodolites wake. fluffily ironic asymptotes cajol|55.4726|-131.793|55.4726|-131.793|
+1221|14|F|117397.16|1992-04-19|4-NOT SPECIFIED|Clerk#000000852|0| detect against the silent, even deposits. carefully ironic|55.3801|-131.682|55.3801|-131.682|
+1222|10|F|47623.94|1993-02-05|3-MEDIUM|Clerk#000000811|0|theodolites use quickly even accounts. carefully final asympto|55.3801|-131.682|55.3801|-131.682|
+1223|10|O|26714.67|1996-05-25|4-NOT SPECIFIED|Clerk#000000238|0|posits was blithely fr|55.3801|-131.682|55.3801|-131.682|
+1248|49|F|210713.88|1992-01-02|1-URGENT|Clerk#000000890|0|t the carefully regular dugouts. s|61.5745|-149.562|61.5745|-149.562|
+1249|149|F|45889.09|1994-01-05|1-URGENT|Clerk#000000095|0|al ideas sleep above the pending pin|61.7321|-150.12|61.7321|-150.12|
+1250|37|F|12907.62|1992-09-29|4-NOT SPECIFIED|Clerk#000000652|0|ts after the fluffily pending instructions use slyly about the s|61.5421|-149.419|61.5421|-149.419|
+1251|38|O|109536.55|1997-10-30|1-URGENT|Clerk#000000276|0|, brave sauternes. deposits boost fluffily.|61.5722|-149.702|61.5722|-149.702|
+1252|149|O|93403.05|1997-08-04|5-LOW|Clerk#000000348|0|ng the slyly regular excuses. special courts nag furiously blithely e|61.5743|-149.405|61.5743|-149.405|
+1253|115|F|92730.74|1993-01-26|1-URGENT|Clerk#000000775|0| requests sleep furiously even foxes. ruthless packag|61.578|-149.441|61.578|-149.441|
+1254|70|O|94649.25|1995-12-22|1-URGENT|Clerk#000000607|0| pinto beans. carefully regular request|61.5826|-149.427|61.5826|-149.427|
+1255|122|F|62518.31|1994-05-30|4-NOT SPECIFIED|Clerk#000000798|0|ct slyly regular accounts. quick|61.5586|-149.351|61.5586|-149.351|
+1280|97|F|91664.85|1993-01-11|5-LOW|Clerk#000000160|0|posits thrash quickly after the theodolites. furiously iro|61.5844|-149.442|61.5844|-149.442|
+1281|62|F|165454.51|1994-12-11|1-URGENT|Clerk#000000430|0|counts. carefully pending accounts eat |61.5817|-149.472|61.5817|-149.472|
+1282|116|F|61297.42|1992-02-29|4-NOT SPECIFIED|Clerk#000000168|0|he quickly special packages. furiously final re|61.6141|-149.457|61.6141|-149.457|
+1283|118|O|202623.92|1996-08-30|4-NOT SPECIFIED|Clerk#000000260|0| pinto beans boost slyly ac|61.5761|-149.602|61.5761|-149.602|
+1284|134|O|106122.38|1996-01-07|2-HIGH|Clerk#000000492|0|s. blithely silent deposits s|61.6141|-149.457|61.6141|-149.457|
+1285|11|F|139124.72|1992-06-01|1-URGENT|Clerk#000000423|0|cial deposits cajole after the ironic requests. p|61.58|-149.434|61.58|-149.434|
+1286|109|F|207291.83|1993-05-14|4-NOT SPECIFIED|Clerk#000000939|0| deposits use carefully from the excuses. slyly bold p|61.6002|-149.429|61.6002|-149.429|
+1287|19|F|131432.42|1994-07-05|2-HIGH|Clerk#000000288|0|ly ironic dolphins integrate furiously among the final packages. st|61.569|-149.347|61.569|-149.347|
+1312|112|F|58111.00|1994-05-19|3-MEDIUM|Clerk#000000538|0|n, express accounts across the ironic|61.5812|-149.448|61.5812|-149.448|
+1313|148|F|46598.65|1994-09-13|1-URGENT|Clerk#000000774|0|ld accounts. regular deposits cajole. ironically pending theodolites use car|61.6141|-149.457|61.6141|-149.457|
+1314|143|F|56207.66|1994-05-13|3-MEDIUM|Clerk#000000485|0|ickly blithe packages nod ideas. furiously bold braids boost around the car|61.6141|-149.457|61.6141|-149.457|
+1315|22|O|121935.23|1998-03-22|5-LOW|Clerk#000000840|0|final theodolites alongside of the carefu|61.6141|-149.457|61.6141|-149.457|
+1316|16|F|163746.47|1993-12-03|1-URGENT|Clerk#000000857|0|ully bold theodolites? pending, bold pin|61.5969|-149.367|61.5969|-149.367|
+1317|100|P|139714.71|1995-05-19|2-HIGH|Clerk#000000373|0|sts. furiously special deposits lose fur|61.58|-149.4|61.58|-149.4|
+1318|128|O|81663.65|1998-06-27|3-MEDIUM|Clerk#000000581|0|s hang bold requests. pending, re|61.5848|-149.445|61.5848|-149.445|
+1319|32|O|31103.83|1996-09-27|2-HIGH|Clerk#000000257|0|y across the ruthlessly ironic accounts. unusu|61.5811|-149.444|61.5811|-149.444|
+1344|17|F|43809.37|1992-04-16|5-LOW|Clerk#000000178|0|omise close, silent requests. pending theodolites boost pending |61.5733|-149.389|61.5733|-149.389|
+1345|95|F|111207.93|1992-10-28|5-LOW|Clerk#000000447|0| regular tithes. quickly fluffy de|61.6141|-149.457|61.6141|-149.457|
+1346|76|F|171975.62|1992-06-18|2-HIGH|Clerk#000000374|0|ges sleep quickly-- even pint|61.5952|-149.436|61.5952|-149.436|
+1347|41|O|173444.60|1997-06-20|5-LOW|Clerk#000000977|0|he furiously even foxes use carefully express req|61.5421|-149.419|61.5421|-149.419|
+1348|19|O|94135.77|1998-04-18|5-LOW|Clerk#000000206|0|tly. quickly even deposi|61.5783|-149.362|61.5783|-149.362|
+1349|64|O|46376.09|1997-10-26|1-URGENT|Clerk#000000543|0|yly! blithely special theodolites cajole. unusual, reg|61.7321|-150.12|61.7321|-150.12|
+1350|52|F|49305.98|1993-08-24|1-URGENT|Clerk#000000635|0|iously about the blithely special a|61.5691|-149.328|61.5691|-149.328|
+1351|106|O|24637.96|1998-04-20|1-URGENT|Clerk#000000012|0| cajole. regular, special re|61.6141|-149.457|61.6141|-149.457|
+1376|47|O|23984.88|1997-05-04|4-NOT SPECIFIED|Clerk#000000730|0|der furiously final, final frets. carefull|61.5819|-149.3|61.5819|-149.3|
+1377|20|O|108334.30|1998-04-24|4-NOT SPECIFIED|Clerk#000000625|0|lly across the blithely express accounts. ironic excuses promise carefully de|61.6431|-149.289|61.6431|-149.289|
+1378|20|O|118495.12|1996-03-09|4-NOT SPECIFIED|Clerk#000000705|0| furiously even tithes cajole slyly among the quick|61.6431|-149.292|61.6431|-149.292|
+1379|65|O|84627.76|1998-05-25|5-LOW|Clerk#000000861|0|y deposits are caref|61.6228|-149.313|61.6228|-149.313|
+1380|137|O|94969.41|1996-07-07|3-MEDIUM|Clerk#000000969|0|inal deposits wake slyly daringly even requests. bold, even foxe|61.2125|-149.894|61.2125|-149.894|
+1381|127|O|58212.22|1998-05-25|3-MEDIUM|Clerk#000000107|0|even requests breach after the bold, ironic instructions. slyly even|61.1879|-149.886|61.1879|-149.886|
+1382|133|F|173522.71|1993-08-17|5-LOW|Clerk#000000241|0|fully final packages sl|61.1594|-149.835|61.1594|-149.835|
+1383|121|F|34797.72|1993-04-27|2-HIGH|Clerk#000000785|0|ts. express requests sleep blithel|61.2123|-149.854|61.2123|-149.854|
+1408|55|O|183965.61|1997-12-26|4-NOT SPECIFIED|Clerk#000000942|0|t the quickly final asymptotes. unusual|61.1951|-149.945|61.1951|-149.945|
+1409|143|F|72440.52|1992-12-31|4-NOT SPECIFIED|Clerk#000000065|0|ructions. furiously unusual excuses are regular, unusual theodolites. fin|61.2138|-149.856|61.2138|-149.856|
+1410|113|O|114879.19|1997-04-12|5-LOW|Clerk#000000123|0|iously along the bravely regular dolphins. pinto beans cajole furiously sp|61.1255|-149.864|61.1255|-149.864|
+1411|95|F|164462.61|1994-12-21|2-HIGH|Clerk#000000566|0|s. furiously special excuses across the pending pinto beans haggle sp|61.2066|-149.808|61.2066|-149.808|
+1412|53|F|78676.54|1993-03-13|4-NOT SPECIFIED|Clerk#000000083|0|uffily daring theodolit|61.2138|-149.896|61.2138|-149.896|
+1413|91|O|75733.58|1997-06-14|3-MEDIUM|Clerk#000000342|0|, ironic instructions. carefully even packages dazzle|61.2161|-149.876|61.2161|-149.876|
+1414|77|O|38057.81|1995-08-16|1-URGENT|Clerk#000000883|0|ccounts. ironic foxes haggle car|61.1594|-149.888|61.1594|-149.888|
+1415|79|F|24654.79|1994-05-29|4-NOT SPECIFIED|Clerk#000000601|0|rays. blithely final ideas affix quickl|61.1806|-149.775|61.1806|-149.775|
+1440|98|O|50201.16|1995-08-10|5-LOW|Clerk#000000956|0| pending requests. closely s|61.1101|-149.857|61.1101|-149.857|
+1441|122|O|156477.94|1997-03-06|4-NOT SPECIFIED|Clerk#000000156|0|ter the excuses. ironic dependencies m|61.1541|-149.958|61.1541|-149.958|
+1442|112|F|7108.12|1994-07-05|4-NOT SPECIFIED|Clerk#000000935|0|nal pinto beans. slyly ironic ideas cajol|61.1268|-149.947|61.1268|-149.947|
+1443|44|O|44672.03|1996-12-16|5-LOW|Clerk#000000185|0|x blithely against the carefully final somas. even asymptotes are. quickly spe|61.0931|-149.785|61.0931|-149.785|
+1444|134|F|207907.60|1994-12-06|3-MEDIUM|Clerk#000000783|0|ove the bold accounts cajole fluffily about|61.1901|-149.892|61.1901|-149.892|
+1445|115|F|154653.32|1995-01-10|3-MEDIUM|Clerk#000000211|0|even packages wake fluffily |61.2183|-149.889|61.2183|-149.889|
+1446|41|O|27663.16|1998-02-16|5-LOW|Clerk#000000274|0|lly regular notornis above the requests sleep final accounts! |61.2164|-149.882|61.2164|-149.882|
+1447|91|F|108171.38|1992-10-15|2-HIGH|Clerk#000000880|0|inly against the blithely pending excuses. regular, pe|61.2161|-149.876|61.2161|-149.876|
+1472|149|O|65331.05|1996-10-06|5-LOW|Clerk#000000303|0|y special dolphins around the final dependencies wake quick|61.219|-149.792|61.219|-149.792|
+1473|94|O|80624.38|1997-03-17|3-MEDIUM|Clerk#000000960|0|furiously close accoun|61.2188|-149.892|61.2188|-149.892|
+1474|70|F|51697.18|1995-01-09|1-URGENT|Clerk#000000438|0|detect quickly above the carefully even |61.2143|-149.837|61.2143|-149.837|
+1475|5|O|185496.66|1997-11-12|2-HIGH|Clerk#000000972|0|cally final packages boost. blithely ironic packa|61.1608|-149.835|61.1608|-149.835|
+1476|145|O|18795.62|1996-06-27|2-HIGH|Clerk#000000673|0|ding accounts hinder alongside of the quickly pending requests. fluf|61.1886|-149.944|61.1886|-149.944|
+1477|76|O|231831.35|1997-08-24|5-LOW|Clerk#000000612|0|ly bold foxes. final ideas would cajo|61.1201|-149.89|61.1201|-149.89|
+1478|50|O|20791.50|1997-08-03|2-HIGH|Clerk#000000827|0|lessly. carefully express|61.1201|-149.89|61.1201|-149.89|
+1479|16|O|31471.04|1995-12-16|4-NOT SPECIFIED|Clerk#000000697|0|he furiously even foxes. thinly bold deposits|61.1585|-149.872|61.1585|-149.872|
+1504|2|F|89399.40|1992-08-28|3-MEDIUM|Clerk#000000381|0|, brave deposits. bold de|61.195|-149.892|61.195|-149.892|
+1505|37|F|55892.35|1992-08-21|2-HIGH|Clerk#000000544|0|s. slyly ironic packages cajole. carefully regular packages haggle |61.0895|-149.694|61.0895|-149.694|
+1506|148|F|195844.84|1992-09-21|3-MEDIUM|Clerk#000000620|0| dependencies. accounts affix blithely slowly unusual deposits. slyly regular |61.2201|-149.831|61.2201|-149.831|
+1507|121|F|96166.92|1993-10-14|3-MEDIUM|Clerk#000000305|0|stealthy, ironic de|61.1663|-149.867|61.1663|-149.867|
+1508|103|O|151282.65|1998-04-10|5-LOW|Clerk#000000117|0| after the furiously regular pinto beans hang slyly quickly ironi|61.2138|-149.906|61.2138|-149.906|
+1509|64|F|180455.98|1993-07-08|5-LOW|Clerk#000000770|0|the regular ideas. regul|61.2193|-149.902|61.2193|-149.902|
+1510|53|O|154590.05|1996-09-17|5-LOW|Clerk#000000128|0|ld carefully. furiously final asymptotes haggle furiously|61.1201|-149.89|61.1201|-149.89|
+1511|79|O|59651.38|1996-12-22|4-NOT SPECIFIED|Clerk#000000386|0|ts above the depend|61.1601|-149.984|61.1601|-149.984|
+1536|94|O|5184.26|1997-01-26|3-MEDIUM|Clerk#000000117|0|ges are! furiously final deposits cajole iron|61.1101|-149.857|61.1101|-149.857|
+1537|109|F|108317.51|1992-02-15|4-NOT SPECIFIED|Clerk#000000862|0|g to the even deposits. ironic, final packages |61.1101|-149.857|61.1101|-149.857|
+1538|29|O|179554.41|1995-06-18|4-NOT SPECIFIED|Clerk#000000258|0| instructions. regular theod|61.1101|-149.857|61.1101|-149.857|
+1539|112|F|39612.63|1995-03-10|5-LOW|Clerk#000000840|0|nstructions boost pa|61.1101|-149.857|61.1101|-149.857|
+1540|16|F|128014.15|1992-08-05|2-HIGH|Clerk#000000927|0|r ideas hinder blithe|61.1101|-149.857|61.1101|-149.857|
+1541|94|P|47286.32|1995-05-18|1-URGENT|Clerk#000000906|0|y. slyly ironic warhorses around the furiously regul|61.1101|-149.857|61.1101|-149.857|
+1542|143|F|132972.24|1993-09-15|3-MEDIUM|Clerk#000000435|0|t the furiously close deposits do was f|61.1101|-149.857|61.1101|-149.857|
+1543|52|O|139047.22|1997-02-20|1-URGENT|Clerk#000000398|0|unts. furiously pend|61.1101|-149.857|61.1101|-149.857|
+1568|17|O|76119.72|1997-01-30|4-NOT SPECIFIED|Clerk#000000554|0|d notornis. carefully |61.1101|-149.857|61.1101|-149.857|
+1569|104|O|87803.55|1998-04-02|5-LOW|Clerk#000000786|0|orbits. fluffily even decoys serve blithely. furiously furious realms nag acro|61.1101|-149.857|61.1101|-149.857|
+1570|124|O|35589.57|1998-03-16|1-URGENT|Clerk#000000745|0|pinto beans haggle furiousl|61.1101|-149.857|61.1101|-149.857|
+1571|103|F|151404.78|1992-12-05|2-HIGH|Clerk#000000565|0|ously furiously bold warthogs. slyly ironic instructions are quickly a|61.1101|-149.857|61.1101|-149.857|
+1572|11|O|47232.79|1996-02-24|2-HIGH|Clerk#000000994|0|fluffily ironic accounts haggle blithely final platelets! slyly regular foxes|61.1101|-149.857|61.1101|-149.857|
+1573|148|F|86918.57|1992-12-28|2-HIGH|Clerk#000000940|0|ess, ironic deposits use along the carefu|61.1101|-149.857|61.1101|-149.857|
+1574|134|O|179923.54|1996-12-12|3-MEDIUM|Clerk#000000809|0| ideas hinder after the carefully unusual |61.1101|-149.857|61.1101|-149.857|
+1575|145|O|197031.52|1995-09-13|3-MEDIUM|Clerk#000000497|0|. furiously regular dep|61.1101|-149.857|61.1101|-149.857|
+1600|94|F|130515.61|1993-03-03|3-MEDIUM|Clerk#000000627|0|tions cajole quietly above the regular, silent requests. slyly fin|61.1101|-149.857|61.1101|-149.857|
+1601|53|F|73962.95|1994-08-27|5-LOW|Clerk#000000469|0|ent deposits are ca|61.1101|-149.857|61.1101|-149.857|
+1602|1|F|4225.26|1993-08-05|5-LOW|Clerk#000000660|0|deposits. busily silent instructions haggle furiously. fin|61.1101|-149.857|61.1101|-149.857|
+1603|2|F|29305.47|1993-07-31|4-NOT SPECIFIED|Clerk#000000869|0|s. slyly silent deposits boo|61.1101|-149.857|61.1101|-149.857|
+1604|113|F|107139.29|1993-07-17|5-LOW|Clerk#000000512|0|lithely silent waters. blithely unusual packages alongside |61.1101|-149.857|61.1101|-149.857|
+1605|58|O|130687.64|1998-04-24|4-NOT SPECIFIED|Clerk#000000616|0|sleep furiously? ruthless, even pinto beans |61.1101|-149.857|61.1101|-149.857|
+1606|53|O|115877.40|1997-04-17|4-NOT SPECIFIED|Clerk#000000550|0|r requests. quickly even platelets breach before the ironically|61.1101|-149.857|61.1101|-149.857|
+1607|149|O|166335.03|1995-12-16|2-HIGH|Clerk#000000498|0| bold, pending foxes haggle. slyly silent |61.1101|-149.857|61.1101|-149.857|
+1632|67|O|183286.33|1997-01-08|3-MEDIUM|Clerk#000000351|0|onic requests are accounts. bold a|61.1101|-149.857|61.1101|-149.857|
+1633|16|O|52359.51|1995-10-14|2-HIGH|Clerk#000000666|0|y silent accounts sl|61.1101|-149.857|61.1101|-149.857|
+1634|70|O|145898.47|1996-09-10|1-URGENT|Clerk#000000360|0|arefully blithely ironic requests. slyly unusual instructions alongside|61.1101|-149.857|61.1101|-149.857|
+1635|4|O|70232.26|1997-02-13|3-MEDIUM|Clerk#000000958|0|s. slyly ironic requests affix slyly |61.1101|-149.857|61.1101|-149.857|
+1636|79|O|172021.87|1997-06-17|3-MEDIUM|Clerk#000000457|0|ding requests. slyly ironic courts wake quickl|61.1101|-149.857|61.1101|-149.857|
+1637|73|F|180912.15|1995-02-08|4-NOT SPECIFIED|Clerk#000000189|0| final accounts. blithely silent ideas cajole bravely. carefully express |61.1101|-149.857|61.1101|-149.857|
+1638|139|O|172436.30|1997-08-13|2-HIGH|Clerk#000000643|0|he fluffily regular asymp|61.1101|-149.857|61.1101|-149.857|
+1639|5|O|104166.56|1995-08-20|4-NOT SPECIFIED|Clerk#000000939|0|haggle furiously. final requests detect furious|61.1101|-149.857|61.1101|-149.857|
+1664|64|O|178060.22|1996-03-03|1-URGENT|Clerk#000000090|0|y quickly even asymptotes. furiously regular packages haggle quickly fin|61.1101|-149.857|61.1101|-149.857|
+1665|76|F|4819.91|1994-05-08|2-HIGH|Clerk#000000920|0|ly regular packages are fluffily even ideas. fluffily final|61.1101|-149.857|61.1101|-149.857|
+1666|95|O|128367.97|1995-10-18|1-URGENT|Clerk#000000849|0|ffily pending dependencies wake fluffily. pending, final accounts |61.1101|-149.857|61.1101|-149.857|
+1667|5|O|125030.37|1997-10-10|2-HIGH|Clerk#000000103|0|e accounts. slyly express accounts must are a|64.8459|-147.759|64.8459|-147.759|
+1668|142|O|137576.19|1997-07-12|4-NOT SPECIFIED|Clerk#000000148|0|eodolites. carefully dogged dolphins haggle q|64.8426|-147.725|64.8426|-147.725|
+1669|2|O|24362.39|1997-06-09|3-MEDIUM|Clerk#000000663|0|er ironic requests detect furiously blithely sp|64.9401|-147.402|64.9401|-147.402|
+1670|25|O|89999.72|1997-05-24|2-HIGH|Clerk#000000320|0|unusual dependencies. furiously special platelets main|64.9401|-147.402|64.9401|-147.402|
+1671|35|O|104391.11|1996-07-27|4-NOT SPECIFIED|Clerk#000000275|0|ly. slyly pending requests was above the |64.8331|-147.647|64.8331|-147.647|
+1696|4|O|102665.03|1998-01-08|4-NOT SPECIFIED|Clerk#000000041|0|bravely bold accounts above the quickly bold|64.8371|-147.716|64.8371|-147.716|
+1697|76|O|122621.31|1996-10-07|1-URGENT|Clerk#000000815|0|o x-ray blithely. pl|64.8574|-147.759|64.8574|-147.759|
+1698|40|O|141118.87|1997-04-23|2-HIGH|Clerk#000000432|0|slyly. carefully express deposit|64.836|-147.727|64.836|-147.727|
+1699|85|F|66408.29|1993-12-30|1-URGENT|Clerk#000000125|0|jole blithely. furiously un|64.8132|-147.76|64.8132|-147.76|
+1700|65|O|89143.36|1996-06-15|3-MEDIUM|Clerk#000000328|0|ely final dolphins wake sometimes above the quietly regular deposits. fur|64.8451|-147.96|64.8451|-147.96|
+1701|130|F|72835.95|1992-05-19|2-HIGH|Clerk#000000395|0|furiously. regular, close theodoli|64.8891|-147.851|64.8891|-147.851|
+1702|67|P|194119.31|1995-05-07|2-HIGH|Clerk#000000300|0|around the carefully final deposits cajole carefully according to the b|64.8151|-147.707|64.8151|-147.707|
+1703|134|F|121220.59|1993-01-28|3-MEDIUM|Clerk#000000463|0| pinto beans poach. bold courts boost. regular, express deposits at|64.8363|-147.803|64.8363|-147.803|
+1728|64|O|131604.34|1996-05-22|2-HIGH|Clerk#000000711|0|beans. slyly regular instructions sleep! slyly final packages|64.8298|-147.611|64.8298|-147.611|
+1729|133|F|12137.76|1992-05-19|2-HIGH|Clerk#000000158|0|pending foxes wake. accounts|64.8989|-147.701|64.8989|-147.701|
+1730|124|O|150886.49|1998-07-24|5-LOW|Clerk#000000794|0| fluffily pending deposits serve. furiously even requests wake furiou|64.8371|-147.716|64.8371|-147.716|
+1731|128|O|190490.78|1996-01-06|1-URGENT|Clerk#000000268|0|lithely regular, final instructions. ironic, express packages are above|64.8147|-147.706|64.8147|-147.706|
+1732|146|F|179854.51|1993-11-29|5-LOW|Clerk#000000903|0|inal requests integrate dolph|64.8451|-147.812|64.8451|-147.812|
+1733|148|O|165489.52|1996-05-12|2-HIGH|Clerk#000000789|0|e carefully according to the accounts. furiously pending instructions sleep|64.8386|-147.788|64.8386|-147.788|
+1734|7|F|44002.53|1994-06-11|2-HIGH|Clerk#000000722|0| final ideas haggle. blithely quick foxes sleep busily bold ideas. i|64.8372|-147.768|64.8372|-147.768|
+1735|22|F|98541.95|1992-12-27|1-URGENT|Clerk#000000458|0|ully idle requests wake qu|64.8151|-147.707|64.8151|-147.707|
+1760|115|O|82151.12|1996-05-17|5-LOW|Clerk#000000917|0| deposits. busily regular deposits wake blithely along the furiously even re|64.843|-147.722|64.843|-147.722|
+1761|106|F|211925.95|1993-12-24|2-HIGH|Clerk#000000817|0|efully slyly bold frets. packages boost b|64.8426|-147.725|64.8426|-147.725|
+1762|77|F|202227.17|1994-08-20|4-NOT SPECIFIED|Clerk#000000653|0|ly ironic packages. furi|64.8615|-147.723|64.8615|-147.723|
+1763|121|O|140685.01|1996-10-29|2-HIGH|Clerk#000000321|0|es. bold dependencies haggle furiously along |64.8694|-147.067|64.8694|-147.067|
+1764|29|F|47384.71|1992-03-25|1-URGENT|Clerk#000000182|0|. slyly final packages integrate carefully acro|64.8404|-147.724|64.8404|-147.724|
+1765|73|O|36551.43|1995-12-03|4-NOT SPECIFIED|Clerk#000000490|0| regular excuses wake slyly|64.9686|-147.577|64.9686|-147.577|
+1766|139|O|41032.81|1996-10-12|2-HIGH|Clerk#000000983|0|unusual deposits affix quickly beyond the carefully s|64.8497|-147.732|64.8497|-147.732|
+1767|25|P|136582.60|1995-03-14|2-HIGH|Clerk#000000327|0|eposits use carefully carefully regular platelets. quickly regular packages al|64.8861|-147.587|64.8861|-147.587|
+1792|49|F|107919.86|1993-11-09|5-LOW|Clerk#000000102|0|ructions haggle along the pending packages. carefully speci|64.8508|-147.703|64.8508|-147.703|
+1793|19|F|82504.56|1992-07-12|4-NOT SPECIFIED|Clerk#000000291|0|regular packages cajole. blithely special packages according to the final d|64.841|-147.72|64.841|-147.72|
+1794|140|O|179462.21|1997-09-28|1-URGENT|Clerk#000000686|0|ally silent pinto beans. regular package|64.8375|-147.721|64.8375|-147.721|
+1795|94|F|146849.33|1994-03-19|2-HIGH|Clerk#000000815|0| quickly final packages! blithely dogged accounts c|64.849|-147.813|64.849|-147.813|
+1796|47|F|33755.47|1992-11-21|2-HIGH|Clerk#000000245|0|eans use furiously around th|55.3801|-131.682|55.3801|-131.682|
+1797|125|O|51494.47|1996-05-07|3-MEDIUM|Clerk#000000508|0|quiet platelets haggle since the quickly ironic instructi|55.3801|-131.682|55.3801|-131.682|
+1798|52|O|46393.97|1997-07-28|1-URGENT|Clerk#000000741|0|al foxes are blithe|55.3603|-131.702|55.3603|-131.702|
+1799|61|F|46815.93|1994-03-07|4-NOT SPECIFIED|Clerk#000000339|0|ns sleep furiously final waters. blithely regular instructions h|55.7511|-132.865|55.7511|-132.865|
+1824|49|F|81351.53|1994-05-05|1-URGENT|Clerk#000000972|0|e blithely fluffily|55.7511|-132.865|55.7511|-132.865|
+1825|148|F|150582.77|1993-12-05|3-MEDIUM|Clerk#000000345|0|ironic, final accou|60.3311|-151.284|60.3311|-151.284|
+1826|82|F|124719.97|1992-04-16|4-NOT SPECIFIED|Clerk#000000718|0|the even asymptotes dazzle fluffily slyly regular asymptotes. final, unu|60.3311|-151.284|60.3311|-151.284|
+1827|106|O|210113.88|1996-06-22|4-NOT SPECIFIED|Clerk#000000369|0|luffily even requests haggle sly|60.3311|-151.284|60.3311|-151.284|
+1828|32|F|137369.50|1994-04-18|3-MEDIUM|Clerk#000000840|0|y quickly bold packag|60.4341|-151.283|60.4341|-151.283|
+1829|112|F|127532.20|1994-05-08|2-HIGH|Clerk#000000537|0| accounts wake above the furiously unusual requests. pending package|60.3311|-151.284|60.3311|-151.284|
+1830|133|F|85122.24|1995-02-23|1-URGENT|Clerk#000000045|0|according to the even,|60.3311|-151.284|60.3311|-151.284|
+1831|71|F|58032.77|1993-12-02|1-URGENT|Clerk#000000854|0| accounts. carefully even accounts boost furiously. regular ideas engage. |60.3311|-151.284|60.3311|-151.284|
+1856|106|F|189361.42|1992-03-20|4-NOT SPECIFIED|Clerk#000000952|0|. special pinto beans run acr|60.3311|-151.284|60.3311|-151.284|
+1857|133|F|102793.59|1993-01-13|2-HIGH|Clerk#000000083|0|hely final ideas slee|60.3311|-151.284|60.3311|-151.284|
+1858|143|O|30457.91|1997-12-13|1-URGENT|Clerk#000000389|0|thely. slyly final deposits sleep|60.4311|-151.286|60.4311|-151.286|
+1859|61|O|105094.09|1997-04-11|4-NOT SPECIFIED|Clerk#000000949|0| the foxes. bravely special excuses nag carefully special r|60.3311|-151.284|60.3311|-151.284|
+1860|10|O|9103.40|1996-04-04|3-MEDIUM|Clerk#000000556|0|osits. quickly bold deposits according to |60.3311|-151.284|60.3311|-151.284|
+1861|70|F|95063.41|1994-01-03|3-MEDIUM|Clerk#000000847|0|r the fluffily close sauternes. furio|60.3311|-151.284|60.3311|-151.284|
+1862|34|O|97981.06|1998-02-24|5-LOW|Clerk#000000348|0|ts snooze ironically abou|60.3311|-151.284|60.3311|-151.284|
+1863|74|F|96359.65|1993-09-23|4-NOT SPECIFIED|Clerk#000000658|0|old sentiments. careful, |60.3191|-151.296|60.3191|-151.296|
+1888|121|F|224724.11|1993-10-31|4-NOT SPECIFIED|Clerk#000000659|0|olites. pinto beans cajole. regular deposits affix. slyly regular|60.3311|-151.284|60.3311|-151.284|
+1889|25|O|96431.77|1997-03-16|1-URGENT|Clerk#000000854|0|p around the regular notornis. unusual deposits|60.3311|-151.284|60.3311|-151.284|
+1890|10|O|202364.58|1996-12-18|4-NOT SPECIFIED|Clerk#000000627|0|romise final, regular deposits. regular fox|60.3311|-151.284|60.3311|-151.284|
+1891|61|F|76848.96|1994-12-15|5-LOW|Clerk#000000495|0|unusual foxes sleep regular deposits. requests wake special pac|60.5563|-151.241|60.5563|-151.241|
+1892|25|F|133273.64|1994-03-26|5-LOW|Clerk#000000733|0|sts. slyly regular dependencies use slyly. ironic, spec|60.6331|-151.163|60.6331|-151.163|
+1893|125|O|116792.13|1997-10-30|2-HIGH|Clerk#000000111|0|olites. silent, special deposits eat slyly quickly express packages; hockey p|60.6331|-151.163|60.6331|-151.163|
+1894|76|F|44387.23|1992-03-30|1-URGENT|Clerk#000000626|0|e furiously. furiously even accounts are slyly final accounts. closely speci|60.6331|-151.163|60.6331|-151.163|
+1895|7|F|44429.81|1994-05-30|3-MEDIUM|Clerk#000000878|0|ress accounts. bold accounts cajole. slyly final pinto beans poach regul|60.6331|-151.163|60.6331|-151.163|
+1920|110|O|119605.91|1998-06-24|5-LOW|Clerk#000000018|0|hely; furiously regular excuses|60.5551|-151.245|60.5551|-151.245|
+1921|88|F|57584.12|1994-01-18|3-MEDIUM|Clerk#000000293|0|counts. slyly quiet requests along the ruthlessly regular accounts are |60.6331|-151.163|60.6331|-151.163|
+1922|56|O|11575.77|1996-07-13|3-MEDIUM|Clerk#000000984|0|side of the blithely final re|60.5506|-151.141|60.5506|-151.141|
+1923|136|O|171128.10|1997-07-07|1-URGENT|Clerk#000000471|0| express dolphins. |60.5681|-151.281|60.5681|-151.281|
+1924|76|O|169756.19|1996-09-07|4-NOT SPECIFIED|Clerk#000000823|0| of the ironic accounts. instructions near the final instr|60.5465|-151.147|60.5465|-151.147|
+1925|17|F|146382.71|1992-03-05|1-URGENT|Clerk#000000986|0|e slyly regular deposits. furiously |60.6331|-151.163|60.6331|-151.163|
+1926|94|O|100035.03|1996-01-31|2-HIGH|Clerk#000000568|0|cajole. even warhorses sleep carefully. |60.5578|-151.116|60.5578|-151.116|
+1927|140|O|23327.88|1995-09-30|3-MEDIUM|Clerk#000000616|0|riously special packages. permanent pearls wake furiously. even packages alo|61.6182|-149.385|61.6182|-149.385|
+1952|67|F|12896.25|1994-03-16|2-HIGH|Clerk#000000254|0| silent accounts boost |61.6141|-149.457|61.6141|-149.457|
+1953|149|F|57213.18|1993-11-30|3-MEDIUM|Clerk#000000891|0| fluffily along the quickly even packages. |61.5765|-149.407|61.5765|-149.407|
+1954|56|O|158853.63|1997-05-31|4-NOT SPECIFIED|Clerk#000000104|0| unusual excuses cajole according to the blithely regular theodolites.|61.6091|-149.77|61.6091|-149.77|
+1955|13|F|103085.13|1992-04-20|1-URGENT|Clerk#000000792|0|ly special ideas. sometimes final |61.5821|-149.438|61.5821|-149.438|
+1956|127|F|88704.26|1992-09-20|4-NOT SPECIFIED|Clerk#000000600|0|ironic ideas are silent ideas. furiously final deposits sleep slyly carefu|61.6183|-149.373|61.6183|-149.373|
+1957|31|O|77482.87|1998-07-21|2-HIGH|Clerk#000000639|0|nding excuses about the |61.6131|-149.403|61.6131|-149.403|
+1958|53|O|176294.34|1995-09-22|5-LOW|Clerk#000000343|0| haggle blithely. flu|61.6352|-149.265|61.6352|-149.265|
+1959|43|O|62277.18|1997-01-13|4-NOT SPECIFIED|Clerk#000000631|0| cajole about the blithely express requests. even excuses mold bl|61.5751|-149.645|61.5751|-149.645|
+1984|52|O|79230.47|1998-04-01|1-URGENT|Clerk#000000416|0| slyly special instructions. unusual foxes use packages. carefully regular req|61.6168|-149.374|61.6168|-149.374|
+1985|7|F|171522.54|1994-09-02|4-NOT SPECIFIED|Clerk#000000741|0|slyly slyly even pains. slyly reg|61.5939|-149.43|61.5939|-149.43|
+1986|149|F|34269.96|1994-05-05|2-HIGH|Clerk#000000609|0|across the theodolites. quick|61.5792|-149.495|61.5792|-149.495|
+1987|100|F|6406.29|1994-04-30|2-HIGH|Clerk#000000652|0|gular platelets alongside |61.6141|-149.457|61.6141|-149.457|
+1988|109|O|117132.72|1995-10-06|4-NOT SPECIFIED|Clerk#000000011|0|ly ironic dolphins serve quickly busy accounts. bu|61.5829|-149.448|61.5829|-149.448|
+1989|118|F|39263.28|1994-03-16|4-NOT SPECIFIED|Clerk#000000747|0|ely bold pinto beans ha|61.5938|-149.387|61.5938|-149.387|
+1990|119|F|48781.39|1994-12-16|2-HIGH|Clerk#000000114|0|e bold patterns. always regul|61.5849|-149.38|61.5849|-149.38|
+1991|19|F|139854.41|1992-09-07|4-NOT SPECIFIED|Clerk#000000854|0|ing accounts can haggle at the carefully final Tiresias-- pending, regular|61.5729|-149.389|61.5729|-149.389|
+2016|8|O|24347.36|1996-08-16|3-MEDIUM|Clerk#000000641|0|the carefully ironic foxes. requests nag bold, r|61.5823|-149.462|61.5823|-149.462|
+2017|101|O|70529.27|1998-05-13|3-MEDIUM|Clerk#000000427|0|nusual requests. blit|61.57|-149.331|61.57|-149.331|
+2018|19|P|25007.95|1995-04-05|4-NOT SPECIFIED|Clerk#000000920|0|gular accounts wake fur|61.5821|-149.438|61.5821|-149.438|
+2019|136|F|43789.14|1992-10-23|1-URGENT|Clerk#000000565|0| furiously bold packages. fluffily fi|61.6141|-149.457|61.6141|-149.457|
+2020|73|F|136162.13|1993-06-21|3-MEDIUM|Clerk#000000192|0|es. furiously regular packages above the furiously special theodolites are a|61.6115|-149.331|61.6115|-149.331|
+2021|70|O|27016.74|1995-07-15|1-URGENT|Clerk#000000155|0|ong the furiously regular requests. unusual deposits wake fluffily inside|61.6091|-149.77|61.6091|-149.77|
+2022|62|F|206742.11|1992-03-15|1-URGENT|Clerk#000000268|0| dependencies sleep fluffily even, ironic deposits. express, silen|61.6141|-149.457|61.6141|-149.457|
+2023|118|F|144123.37|1992-05-06|5-LOW|Clerk#000000137|0|ular courts engage according to the|61.5826|-149.427|61.5826|-149.427|
+2048|17|F|33401.77|1993-11-15|1-URGENT|Clerk#000000934|0|s cajole after the blithely final accounts. f|61.5976|-149.366|61.5976|-149.366|
+2049|31|O|153048.74|1995-12-07|2-HIGH|Clerk#000000859|0|ly regular requests thrash blithely about the fluffily even theodolites. r|61.5976|-149.366|61.5976|-149.366|
+2050|28|F|208517.98|1994-06-02|4-NOT SPECIFIED|Clerk#000000821|0|d accounts against the furiously regular packages use bli|61.5531|-149.651|61.5531|-149.651|
+2051|40|O|87988.34|1996-03-18|4-NOT SPECIFIED|Clerk#000000333|0|ctions sleep blithely. blithely regu|61.5531|-149.651|61.5531|-149.651|
+2052|91|F|141822.19|1992-04-13|2-HIGH|Clerk#000000767|0| requests sleep around the even, even courts. ironic theodolites affix furious|61.5883|-149.456|61.5883|-149.456|
+2053|142|F|125125.57|1995-02-07|1-URGENT|Clerk#000000717|0|ar requests: blithely sly accounts boost carefully across t|61.6249|-149.435|61.6249|-149.435|
+2054|41|F|144335.16|1992-06-08|4-NOT SPECIFIED|Clerk#000000103|0|l requests affix carefully about the furiously special|61.6141|-149.457|61.6141|-149.457|
+2055|97|F|57092.26|1993-09-04|1-URGENT|Clerk#000000067|0|. warhorses affix slyly blithely express instructions? fur|61.5709|-149.452|61.5709|-149.452|
+2080|95|F|45767.69|1993-06-18|5-LOW|Clerk#000000190|0|ironic, pending theodolites are carefully about the quickly regular theodolite|61.6651|-149.465|61.6651|-149.465|
+2081|121|O|145654.97|1997-07-05|2-HIGH|Clerk#000000136|0|ong the regular theo|61.5841|-149.441|61.5841|-149.441|
+2082|49|F|46753.63|1995-01-10|2-HIGH|Clerk#000000354|0|cial accounts. ironic, express dolphins nod slyly sometimes final reques|61.1571|-149.883|61.1571|-149.883|
+2083|101|F|31795.52|1993-07-14|3-MEDIUM|Clerk#000000361|0|al patterns. bold, final foxes nag bravely about the furiously express|61.2198|-149.733|61.2198|-149.733|
+2084|80|F|190652.53|1993-03-17|2-HIGH|Clerk#000000048|0|zle furiously final, careful packages. slyly ironic ideas amo|61.1863|-149.976|61.1863|-149.976|
+2085|49|F|45311.07|1993-11-21|3-MEDIUM|Clerk#000000818|0|ress, express ideas haggle|61.2161|-149.876|61.2161|-149.876|
+2086|142|F|188985.18|1994-10-19|1-URGENT|Clerk#000000046|0| permanently regular|61.2031|-149.749|61.2031|-149.749|
+2087|50|O|53581.41|1998-01-31|2-HIGH|Clerk#000000626|0|e always regular packages nod against the furiously spec|61.1644|-149.897|61.1644|-149.897|
+2112|64|O|17986.15|1997-02-05|2-HIGH|Clerk#000000351|0|against the slyly even id|61.1834|-149.866|61.1834|-149.866|
+2113|32|O|65678.21|1997-11-08|2-HIGH|Clerk#000000527|0|slyly regular instruct|61.1731|-149.889|61.1731|-149.889|
+2114|79|F|106446.02|1995-01-16|5-LOW|Clerk#000000751|0|r, unusual accounts haggle across the busy platelets. carefully |61.1089|-149.854|61.1089|-149.854|
+2115|106|O|134814.65|1998-05-23|4-NOT SPECIFIED|Clerk#000000101|0|odolites boost. carefully regular excuses cajole. quickly ironic pinto be|61.1951|-149.916|61.1951|-149.916|
+2116|23|F|60887.90|1994-08-26|1-URGENT|Clerk#000000197|0|efully after the asymptotes. furiously sp|61.2157|-149.821|61.2157|-149.821|
+2117|22|O|145713.03|1997-04-26|2-HIGH|Clerk#000000887|0|ely even dependencies. regular foxes use blithely.|61.1372|-149.954|61.1372|-149.954|
+2118|134|O|38974.67|1996-10-09|1-URGENT|Clerk#000000196|0|ial requests wake carefully special packages. f|61.1955|-149.737|61.1955|-149.737|
+2119|64|O|34632.57|1996-08-20|2-HIGH|Clerk#000000434|0|uickly pending escapades. fluffily ir|61.1444|-149.867|61.1444|-149.867|
+2144|136|F|119917.28|1994-03-29|3-MEDIUM|Clerk#000000546|0|t. carefully quick requests across the deposits wake regu|61.2178|-149.882|61.2178|-149.882|
+2145|134|F|18885.35|1992-10-03|1-URGENT|Clerk#000000886|0|sts would snooze blithely alongside of th|61.1824|-149.849|61.1824|-149.849|
+2146|118|F|179686.07|1992-09-14|4-NOT SPECIFIED|Clerk#000000476|0|ven packages. dependencies wake slyl|61.2161|-149.876|61.2161|-149.876|
+2147|100|F|91513.79|1992-09-06|4-NOT SPECIFIED|Clerk#000000424|0| haggle carefully furiously final foxes. pending escapades thrash. bold theod|61.2022|-149.84|61.2022|-149.84|
+2148|130|F|19612.03|1995-04-19|4-NOT SPECIFIED|Clerk#000000517|0|ross the furiously unusual theodolites. always expre|61.2099|-149.762|61.2099|-149.762|
+2149|101|F|105145.40|1993-03-13|5-LOW|Clerk#000000555|0|nusual accounts nag furiously special reques|61.1951|-149.84|61.1951|-149.84|
+2150|82|F|166961.06|1994-06-03|3-MEDIUM|Clerk#000000154|0|ect slyly against the even, final packages. quickly regular pinto beans wake c|61.1069|-149.859|61.1069|-149.859|
+2151|58|O|124608.69|1996-11-11|3-MEDIUM|Clerk#000000996|0|c requests. ironic platelets cajole across the quickly fluffy deposits.|61.1635|-149.881|61.1635|-149.881|
+2176|104|F|87248.17|1992-11-10|1-URGENT|Clerk#000000195|0|s haggle regularly accor|61.1201|-149.89|61.1201|-149.89|
+2177|136|O|183493.42|1997-01-20|3-MEDIUM|Clerk#000000161|0|ove the blithely unusual packages cajole carefully fluffily special request|61.1902|-149.908|61.1902|-149.908|
+2178|8|O|79594.68|1996-12-12|3-MEDIUM|Clerk#000000656|0|thely according to the instructions. furious|61.2104|-149.857|61.2104|-149.857|
+2179|41|O|77487.09|1996-09-07|2-HIGH|Clerk#000000935|0|ounts alongside of the furiously unusual braids cajol|61.1771|-149.97|61.1771|-149.97|
+2180|76|O|208481.57|1996-09-14|4-NOT SPECIFIED|Clerk#000000650|0|xpress, unusual pains. furiously ironic excu|61.1859|-149.976|61.1859|-149.976|
+2181|76|O|100954.64|1995-09-13|3-MEDIUM|Clerk#000000814|0|y against the ironic, even|61.2171|-149.9|61.2171|-149.9|
+2182|23|F|116003.11|1994-04-05|2-HIGH|Clerk#000000071|0|ccounts. quickly bold deposits across the excuses sl|61.1162|-149.755|61.1162|-149.755|
+2183|113|O|49841.12|1996-06-22|1-URGENT|Clerk#000000287|0| among the express, ironic packages. slyly ironic platelets integrat|61.1381|-149.844|61.1381|-149.844|
+2208|68|P|245388.06|1995-05-01|4-NOT SPECIFIED|Clerk#000000900|0|symptotes wake slyly blithely unusual packages.|61.1775|-149.941|61.1775|-149.941|
+2209|91|F|129086.93|1992-07-10|2-HIGH|Clerk#000000056|0|er above the slyly silent requests. furiously reg|61.1938|-149.878|61.1938|-149.878|
+2210|32|F|31689.46|1992-01-16|2-HIGH|Clerk#000000941|0| believe carefully quickly express pinto beans. deposi|61.1571|-149.883|61.1571|-149.883|
+2211|92|F|140031.23|1994-06-30|2-HIGH|Clerk#000000464|0|ffily bold courts e|61.1541|-149.958|61.1541|-149.958|
+2212|118|F|17231.05|1994-03-23|3-MEDIUM|Clerk#000000954|0|structions above the unusual requests use fur|61.135|-149.88|61.135|-149.88|
+2213|122|F|146136.10|1993-01-15|4-NOT SPECIFIED|Clerk#000000598|0|osits are carefully reg|61.1101|-149.857|61.1101|-149.857|
+2214|115|O|150345.63|1998-05-05|3-MEDIUM|Clerk#000000253|0|packages. fluffily even accounts haggle blithely. carefully ironic depen|61.1101|-149.857|61.1101|-149.857|
+2215|40|O|108239.46|1996-06-16|4-NOT SPECIFIED|Clerk#000000817|0|le final, final foxes. quickly regular gifts are carefully deposit|61.1101|-149.857|61.1101|-149.857|
+2240|56|F|174090.30|1992-03-06|4-NOT SPECIFIED|Clerk#000000622|0|accounts against the slyly express foxes are after the slyly regular |61.1101|-149.857|61.1101|-149.857|
+2241|103|F|165219.08|1993-05-11|1-URGENT|Clerk#000000081|0|y about the silent excuses. furiously ironic instructions along the sil|61.1101|-149.857|61.1101|-149.857|
+2242|82|O|15082.82|1997-07-20|4-NOT SPECIFIED|Clerk#000000360|0| pending multipliers. carefully express asymptotes use quickl|61.1101|-149.857|61.1101|-149.857|
+2243|49|O|10451.97|1995-06-10|2-HIGH|Clerk#000000813|0|ously regular deposits integrate s|61.1101|-149.857|61.1101|-149.857|
+2244|127|F|21207.08|1993-01-09|1-URGENT|Clerk#000001000|0|ckages. ironic, ironic accounts haggle blithely express excuses. |61.1101|-149.857|61.1101|-149.857|
+2245|58|F|150585.73|1993-04-28|3-MEDIUM|Clerk#000000528|0|ake carefully. braids haggle slyly quickly b|61.1101|-149.857|61.1101|-149.857|
+2246|113|O|85755.84|1996-05-27|4-NOT SPECIFIED|Clerk#000000739|0| final gifts sleep |61.1101|-149.857|61.1101|-149.857|
+2247|95|F|13491.31|1992-08-02|4-NOT SPECIFIED|Clerk#000000947|0|furiously regular packages. final brai|61.1101|-149.857|61.1101|-149.857|
+2272|139|F|127934.71|1993-04-13|2-HIGH|Clerk#000000449|0|s. bold, ironic pinto beans wake. silently specia|61.1101|-149.857|61.1101|-149.857|
+2273|136|O|142291.79|1996-12-14|5-LOW|Clerk#000000155|0|uickly express foxes haggle quickly against|61.1101|-149.857|61.1101|-149.857|
+2274|104|F|58273.89|1993-09-04|4-NOT SPECIFIED|Clerk#000000258|0|nstructions try to hag|61.1101|-149.857|61.1101|-149.857|
+2275|149|F|37398.90|1992-10-22|4-NOT SPECIFIED|Clerk#000000206|0| furiously furious platelets. slyly final packa|61.1101|-149.857|61.1101|-149.857|
+2276|43|O|141159.63|1996-04-29|4-NOT SPECIFIED|Clerk#000000821|0|ecial requests. fox|61.1101|-149.857|61.1101|-149.857|
+2277|89|F|79270.23|1995-01-02|4-NOT SPECIFIED|Clerk#000000385|0|accounts cajole. even i|61.1101|-149.857|61.1101|-149.857|
+2278|142|O|101878.46|1998-04-25|3-MEDIUM|Clerk#000000186|0|r pinto beans integrate after the carefully even deposits. blit|61.1101|-149.857|61.1101|-149.857|
+2279|80|F|142322.33|1993-02-23|3-MEDIUM|Clerk#000000898|0|de of the quickly unusual instructio|61.2141|-149.864|61.2141|-149.864|
+2304|46|F|93769.28|1994-01-07|4-NOT SPECIFIED|Clerk#000000415|0|onic platelets. ironic packages haggle. packages nag doggedly according to|61.2171|-149.9|61.2171|-149.9|
+2305|43|F|122964.66|1993-01-26|2-HIGH|Clerk#000000440|0|ove the furiously even acco|61.2171|-149.9|61.2171|-149.9|
+2306|28|O|244704.23|1995-07-26|2-HIGH|Clerk#000000975|0| wake furiously requests. permanent requests affix. final packages caj|61.2171|-149.9|61.2171|-149.9|
+2307|106|F|59417.76|1993-06-29|5-LOW|Clerk#000000952|0|furiously even asymptotes? carefully regular accounts|61.2171|-149.9|61.2171|-149.9|
+2308|25|F|58546.02|1992-10-25|4-NOT SPECIFIED|Clerk#000000609|0|ts. slyly final depo|61.2171|-149.9|61.2171|-149.9|
+2309|100|O|146933.07|1995-09-04|5-LOW|Clerk#000000803|0|he carefully pending packages. fluffily stealthy foxes engage carefully|61.2171|-149.9|61.2171|-149.9|
+2310|31|O|82928.12|1996-09-20|5-LOW|Clerk#000000917|0|wake carefully. unusual instructions nag ironic, regular excuse|61.2171|-149.9|61.2171|-149.9|
+2311|73|P|153233.93|1995-05-02|2-HIGH|Clerk#000000761|0|ly pending asymptotes-- furiously bold excus|61.2171|-149.9|61.2171|-149.9|
+2336|142|O|22294.51|1996-01-07|4-NOT SPECIFIED|Clerk#000000902|0|c, final excuses sleep furiously among the even theodolites. f|61.2171|-149.9|61.2171|-149.9|
+2337|142|O|45704.96|1997-06-18|4-NOT SPECIFIED|Clerk#000000754|0| quickly. final accounts haggle. carefully final acco|61.2171|-149.9|61.2171|-149.9|
+2338|140|O|28155.92|1997-09-15|2-HIGH|Clerk#000000951|0|riously final dugouts. final, ironic packages wake express, ironic id|61.2171|-149.9|61.2171|-149.9|
+2339|109|F|63470.78|1993-12-15|5-LOW|Clerk#000000847|0| against the regular |61.2171|-149.9|61.2171|-149.9|
+2340|65|O|30778.78|1996-01-12|1-URGENT|Clerk#000000964|0|ter the deposits sleep according to the slyly regular packages. carefully |61.2171|-149.9|61.2171|-149.9|
+2341|82|F|55950.21|1993-05-30|5-LOW|Clerk#000000443|0|sts-- blithely bold dolphins through the deposits nag blithely carefully re|61.2171|-149.9|61.2171|-149.9|
+2342|37|O|104038.78|1996-06-09|1-URGENT|Clerk#000000615|0|oost carefully across the regular accounts. blithely final d|61.2171|-149.9|61.2171|-149.9|
+2343|73|O|85381.00|1995-08-21|3-MEDIUM|Clerk#000000170|0|fluffily over the slyly special deposits. quickl|64.8487|-147.704|64.8487|-147.704|
+2368|13|F|101240.96|1993-08-20|1-URGENT|Clerk#000000830|0|t the bold instructions. carefully unusual |64.8486|-147.705|64.8486|-147.705|
+2369|110|O|73517.91|1996-12-24|2-HIGH|Clerk#000000752|0|iously even requests are dogged, express |64.8087|-147.71|64.8087|-147.71|
+2370|142|F|73924.21|1994-01-17|1-URGENT|Clerk#000000231|0|lyly final packages. quickly final deposits haggl|64.8363|-147.758|64.8363|-147.758|
+2371|19|O|193857.67|1998-01-07|1-URGENT|Clerk#000000028|0|ckages haggle at th|64.8476|-147.704|64.8476|-147.704|
+2372|31|O|104927.66|1997-11-21|5-LOW|Clerk#000000342|0|s: deposits haggle along the final ideas. careful|64.8302|-147.744|64.8302|-147.744|
+2373|28|F|55211.04|1994-03-12|4-NOT SPECIFIED|Clerk#000000306|0| even, special courts grow quickly. pending,|64.8476|-147.812|64.8476|-147.812|
+2374|4|F|115219.88|1993-10-29|4-NOT SPECIFIED|Clerk#000000081|0| blithely regular packages. blithely unusua|64.8144|-147.756|64.8144|-147.756|
+2375|5|O|106612.48|1996-11-20|3-MEDIUM|Clerk#000000197|0|unusual, pending theodolites cajole carefully |64.8183|-147.778|64.8183|-147.778|
+2400|37|O|92798.66|1998-07-25|5-LOW|Clerk#000000782|0|nusual courts nag against the carefully unusual pinto b|64.8494|-147.818|64.8494|-147.818|
+2401|148|O|88448.24|1997-07-29|4-NOT SPECIFIED|Clerk#000000531|0|ully unusual instructions boost carefully silently regular requests. |64.849|-147.822|64.849|-147.822|
+2402|67|O|70403.62|1996-09-06|4-NOT SPECIFIED|Clerk#000000162|0|slyly final sheaves sleep slyly. q|64.8367|-147.716|64.8367|-147.716|
+2403|55|O|111020.79|1998-04-11|3-MEDIUM|Clerk#000000820|0|furiously regular deposits use. furiously unusual accounts wake along the |64.8127|-147.772|64.8127|-147.772|
+2404|77|O|109077.69|1997-03-13|4-NOT SPECIFIED|Clerk#000000409|0|deposits breach furiously. ironic foxes haggle carefully bold packag|64.8143|-147.751|64.8143|-147.751|
+2405|73|O|115929.14|1996-12-23|3-MEDIUM|Clerk#000000535|0|ular, regular asympto|64.842|-147.721|64.842|-147.721|
+2406|7|O|182516.77|1996-10-28|5-LOW|Clerk#000000561|0|blithely regular accounts u|64.8403|-147.714|64.8403|-147.714|
+2407|55|O|112843.52|1998-06-19|2-HIGH|Clerk#000000068|0|uests affix slyly among the slyly regular depos|64.8371|-147.881|64.8371|-147.881|
+2432|103|O|62661.93|1996-07-13|1-URGENT|Clerk#000000115|0|re. slyly even deposits wake bra|64.8151|-147.707|64.8151|-147.707|
+2433|31|F|147071.86|1994-08-22|4-NOT SPECIFIED|Clerk#000000324|0|ess patterns are slyly. packages haggle carefu|64.8151|-147.707|64.8151|-147.707|
+2434|25|O|123956.25|1997-04-27|3-MEDIUM|Clerk#000000190|0|s. quickly ironic dolphins impress final deposits. blithel|64.8541|-147.81|64.8541|-147.81|
+2435|73|F|122490.66|1993-02-21|5-LOW|Clerk#000000112|0|es are carefully along the carefully final instructions. pe|64.8878|-147.496|64.8878|-147.496|
+2436|125|O|73990.08|1995-09-11|4-NOT SPECIFIED|Clerk#000000549|0|arefully. blithely bold deposits affix special accounts. final foxes nag. spe|64.8299|-147.728|64.8299|-147.728|
+2437|85|F|143411.69|1993-04-21|4-NOT SPECIFIED|Clerk#000000578|0|. theodolites wake slyly-- ironic, pending platelets above the carefully exp|64.8132|-147.762|64.8132|-147.762|
+2438|13|F|214494.39|1993-07-15|2-HIGH|Clerk#000000744|0|the final, regular warhorses. regularly |64.8372|-147.713|64.8372|-147.713|
+2439|55|O|41811.12|1997-03-15|2-HIGH|Clerk#000000819|0|lithely after the car|64.7927|-148.036|64.7927|-148.036|
+2464|145|O|30495.65|1997-11-23|5-LOW|Clerk#000000633|0|le about the instructions. courts wake carefully even|64.8717|-147.819|64.8717|-147.819|
+2465|34|O|180737.75|1995-06-24|1-URGENT|Clerk#000000078|0|al pinto beans. final, bold packages wake quickly|64.8527|-147.686|64.8527|-147.686|
+2466|19|F|161625.50|1994-03-06|1-URGENT|Clerk#000000424|0|c pinto beans. express deposits wake quickly. even, final courts nag. package|64.8371|-147.811|64.8371|-147.811|
+2467|35|O|7231.91|1995-07-16|4-NOT SPECIFIED|Clerk#000000914|0|pades sleep furiously. sometimes regular packages again|64.846|-147.705|64.846|-147.705|
+2468|112|O|160627.01|1997-06-09|4-NOT SPECIFIED|Clerk#000000260|0|ickly regular packages. slyly ruthless requests snooze quickly blithe|64.9064|-147.726|64.9064|-147.726|
+2469|124|O|192074.23|1996-11-26|5-LOW|Clerk#000000730|0| sleep closely regular instructions. furiously ironic instructi|64.9347|-147.56|64.9347|-147.56|
+2470|58|O|104966.33|1997-04-19|3-MEDIUM|Clerk#000000452|0|to the furiously final packages? pa|64.8861|-147.677|64.8861|-147.677|
+2471|89|O|34936.31|1998-03-12|4-NOT SPECIFIED|Clerk#000000860|0|carefully blithely regular pac|64.8302|-147.744|64.8302|-147.744|
+2496|136|F|140390.60|1994-01-09|2-HIGH|Clerk#000000142|0|slyly. pending instructions sleep. quic|60.6673|-151.311|60.6673|-151.311|
+2497|47|F|171326.48|1992-08-27|1-URGENT|Clerk#000000977|0|ily ironic pinto beans. furiously final platelets alongside of t|60.6997|-151.38|60.6997|-151.38|
+2498|97|F|45514.27|1993-11-08|5-LOW|Clerk#000000373|0|g the slyly special pinto beans. |60.5658|-151.244|60.5658|-151.244|
+2499|121|O|147243.86|1995-09-24|1-URGENT|Clerk#000000277|0|r the quickly bold foxes. bold instructi|60.6331|-151.163|60.6331|-151.163|
+2500|133|F|131122.82|1992-08-15|2-HIGH|Clerk#000000447|0|integrate slyly pending deposits. furiously ironic accounts across the s|60.6331|-151.163|60.6331|-151.163|
+2501|67|O|79380.51|1997-05-25|5-LOW|Clerk#000000144|0|ickly special theodolite|60.6331|-151.163|60.6331|-151.163|
+2502|70|F|33470.40|1993-05-28|4-NOT SPECIFIED|Clerk#000000914|0|lyly: carefully pending ideas affix again|60.6201|-151.332|60.6201|-151.332|
+2503|7|F|183671.08|1993-06-20|3-MEDIUM|Clerk#000000294|0|ly even packages was. ironic, regular deposits unwind furiously across the p|60.5004|-151.276|60.5004|-151.276|
+2528|55|F|92069.62|1994-11-20|1-URGENT|Clerk#000000789|0|ular dependencies? regular frays kindle according to the blith|60.6331|-151.163|60.6331|-151.163|
+2529|136|O|4104.30|1996-08-20|2-HIGH|Clerk#000000511|0|posits across the silent instructions wake blithely across |60.6331|-151.163|60.6331|-151.163|
+2530|128|F|58853.11|1994-03-21|3-MEDIUM|Clerk#000000291|0|ular instructions about the quic|60.6901|-151.321|60.6901|-151.321|
+2531|44|O|143212.85|1996-05-06|4-NOT SPECIFIED|Clerk#000000095|0|even accounts. furiously ironic excuses sleep fluffily. carefully silen|60.6676|-151.29|60.6676|-151.29|
+2532|94|O|116093.49|1995-10-11|2-HIGH|Clerk#000000498|0|the blithely pending accounts. regular, regular excuses boost aro|60.6331|-151.163|60.6331|-151.163|
+2533|50|O|168495.03|1997-03-24|1-URGENT|Clerk#000000594|0|ecial instructions. spec|60.5632|-151.266|60.5632|-151.266|
+2534|76|O|202784.54|1996-07-17|3-MEDIUM|Clerk#000000332|0|packages cajole ironic requests. furiously regular|60.6331|-151.163|60.6331|-151.163|
+2535|121|F|67018.30|1993-05-25|5-LOW|Clerk#000000296|0|phins cajole beneath the fluffily express asymptotes. c|60.6331|-151.163|60.6331|-151.163|
+2560|131|F|153426.79|1992-09-05|1-URGENT|Clerk#000000538|0|atelets; quickly sly requests|60.6509|-151.342|60.6509|-151.342|
+2561|58|O|137473.58|1997-11-14|1-URGENT|Clerk#000000861|0|ual requests. unusual deposits cajole furiously pending, regular platelets. |60.5601|-151.107|60.5601|-151.107|
+2562|10|F|136360.37|1992-08-01|1-URGENT|Clerk#000000467|0|elets. pending dolphins promise slyly. bo|60.5123|-151.275|60.5123|-151.275|
+2563|62|F|168952.10|1993-11-19|4-NOT SPECIFIED|Clerk#000000150|0|sly even packages after the furio|60.6076|-151.325|60.6076|-151.325|
+2564|77|F|3967.47|1994-09-09|2-HIGH|Clerk#000000718|0|usly regular pinto beans. orbits wake carefully. slyly e|60.6331|-151.163|60.6331|-151.163|
+2565|56|O|204438.57|1998-02-28|3-MEDIUM|Clerk#000000032|0|x-ray blithely along|60.5175|-151.235|60.5175|-151.235|
+2566|86|F|89992.48|1992-10-10|3-MEDIUM|Clerk#000000414|0|ructions boost bold ideas. idly ironic accounts use according to th|60.5535|-151.108|60.5535|-151.108|
+2567|70|O|263411.29|1998-02-27|2-HIGH|Clerk#000000031|0|detect. furiously ironic requests|60.5614|-151.275|60.5614|-151.275|
+2592|101|F|8225.96|1993-03-05|4-NOT SPECIFIED|Clerk#000000524|0|ts nag fluffily. quickly stealthy theodolite|60.5647|-151.195|60.5647|-151.195|
+2593|92|F|134726.09|1993-09-04|2-HIGH|Clerk#000000468|0|r the carefully final|60.6331|-151.163|60.6331|-151.163|
+2594|79|F|94866.39|1992-12-17|1-URGENT|Clerk#000000550|0|ests. theodolites above the blithely even accounts detect furio|60.6331|-151.163|60.6331|-151.163|
+2595|74|O|173130.20|1995-12-14|4-NOT SPECIFIED|Clerk#000000222|0|arefully ironic requests nag carefully ideas. |60.6331|-151.163|60.6331|-151.163|
+2596|43|O|74940.13|1996-08-17|1-URGENT|Clerk#000000242|0|requests. ironic, bold theodolites wak|60.6331|-151.163|60.6331|-151.163|
+2597|104|F|21964.66|1993-02-04|2-HIGH|Clerk#000000757|0|iously ruthless exc|60.6331|-151.163|60.6331|-151.163|
+2598|112|O|84871.50|1996-03-05|3-MEDIUM|Clerk#000000391|0| ironic notornis according to the blithely final requests should |60.6678|-151.31|60.6678|-151.31|
+2599|149|O|62807.13|1996-11-07|2-HIGH|Clerk#000000722|0|ts. slyly regular theodolites wake sil|60.5003|-151.276|60.5003|-151.276|
+2624|52|O|27148.63|1996-11-28|5-LOW|Clerk#000000930|0|ic, regular packages|60.6331|-151.163|60.6331|-151.163|
+2625|40|F|39382.74|1992-10-14|4-NOT SPECIFIED|Clerk#000000386|0| final deposits. blithely ironic ideas |61.5855|-149.326|61.5855|-149.326|
+2626|139|O|84314.51|1995-09-08|4-NOT SPECIFIED|Clerk#000000289|0|gside of the carefully special packages are furiously after the slyly express |61.5979|-149.437|61.5979|-149.437|
+2627|149|F|26798.65|1992-03-24|3-MEDIUM|Clerk#000000181|0|s. silent, ruthless requests|61.6141|-149.457|61.6141|-149.457|
+2628|56|F|165655.99|1993-10-22|5-LOW|Clerk#000000836|0|ajole across the blithely careful accounts. blithely silent deposits sl|61.5799|-149.461|61.5799|-149.461|
+2629|139|O|96458.03|1998-04-06|5-LOW|Clerk#000000680|0|uches dazzle carefully even, express excuses. ac|61.5845|-149.337|61.5845|-149.337|
+2630|85|F|127132.51|1992-10-24|5-LOW|Clerk#000000712|0|inal theodolites. ironic instructions s|61.5351|-149.558|61.5351|-149.558|
+2631|37|F|63103.32|1993-09-24|5-LOW|Clerk#000000833|0| quickly unusual deposits doubt around |61.5811|-149.45|61.5811|-149.45|
+2656|77|F|105492.37|1993-05-04|1-URGENT|Clerk#000000307|0|elets. slyly final accou|61.5793|-149.442|61.5793|-149.442|
+2657|25|O|148176.06|1995-10-17|2-HIGH|Clerk#000000160|0| foxes-- slyly final dependencies around the slyly final theodo|61.5661|-149.313|61.5661|-149.313|
+2658|14|O|163834.46|1995-09-23|3-MEDIUM|Clerk#000000400|0|bout the slyly regular accounts. ironic, |61.6141|-149.457|61.6141|-149.457|
+2659|83|F|79785.52|1993-12-18|4-NOT SPECIFIED|Clerk#000000758|0|cross the pending requests maintain |61.5786|-149.332|61.5786|-149.332|
+2660|127|O|16922.51|1995-08-05|5-LOW|Clerk#000000480|0|ly finally regular deposits. ironic theodolites cajole|61.5811|-149.45|61.5811|-149.45|
+2661|74|O|106036.84|1997-01-04|3-MEDIUM|Clerk#000000217|0|al, regular pinto beans. silently final deposits should have t|61.5825|-149.429|61.5825|-149.429|
+2662|37|O|87689.88|1996-08-21|3-MEDIUM|Clerk#000000589|0|bold pinto beans above the slyly final accounts affix furiously deposits. pac|61.6141|-149.457|61.6141|-149.457|
+2663|95|O|35131.80|1995-09-06|1-URGENT|Clerk#000000950|0|ar requests. furiously final dolphins along the fluffily spe|61.5531|-149.651|61.5531|-149.651|
+2688|98|F|181077.36|1992-01-24|2-HIGH|Clerk#000000720|0|have to nag according to the pending theodolites. sly|61.5531|-149.651|61.5531|-149.651|
+2689|103|F|41552.78|1992-04-09|4-NOT SPECIFIED|Clerk#000000698|0|press pains wake. furiously express theodolites alongsid|61.5698|-149.62|61.5698|-149.62|
+2690|94|O|224674.27|1996-03-31|3-MEDIUM|Clerk#000000760|0|ravely even theodolites |61.6141|-149.457|61.6141|-149.457|
+2691|7|F|30137.17|1992-04-30|5-LOW|Clerk#000000439|0|es at the regular deposits sleep slyly by the fluffy requests. eve|61.5474|-149.458|61.5474|-149.458|
+2692|62|O|24265.24|1997-12-02|3-MEDIUM|Clerk#000000878|0|es. regular asymptotes cajole above t|61.5825|-149.429|61.5825|-149.429|
+2693|19|O|66158.13|1996-09-04|1-URGENT|Clerk#000000370|0|ndle never. blithely regular packages nag carefully enticing platelets. ca|61.5955|-149.423|61.5955|-149.423|
+2694|121|O|102807.59|1996-03-14|5-LOW|Clerk#000000722|0| requests. bold deposits above the theodol|61.5801|-149.461|61.5801|-149.461|
+2695|58|O|138584.20|1996-08-20|1-URGENT|Clerk#000000697|0|ven deposits around the quickly regular packa|61.5785|-149.415|61.5785|-149.415|
+2720|31|F|161307.05|1993-06-08|1-URGENT|Clerk#000000948|0|quickly. special asymptotes are fluffily ironi|61.6402|-149.34|61.6402|-149.34|
+2721|79|O|59180.25|1996-01-27|2-HIGH|Clerk#000000401|0| ideas eat even, unusual ideas. theodolites are carefully|61.583|-149.457|61.583|-149.457|
+2722|35|F|50328.84|1994-04-09|5-LOW|Clerk#000000638|0|rding to the carefully quick deposits. bli|61.5907|-149.295|61.5907|-149.295|
+2723|61|O|104759.25|1995-10-06|5-LOW|Clerk#000000836|0|nts must have to cajo|61.6141|-149.457|61.6141|-149.457|
+2724|137|F|116069.66|1994-09-14|2-HIGH|Clerk#000000217|0| sleep blithely. blithely idle |61.5933|-149.397|61.5933|-149.397|
+2725|89|F|75144.68|1994-05-21|4-NOT SPECIFIED|Clerk#000000835|0|ular deposits. spec|61.6091|-149.77|61.6091|-149.77|
+2726|7|F|47753.00|1992-11-27|5-LOW|Clerk#000000470|0| blithely even dinos sleep care|61.577|-149.411|61.577|-149.411|
+2727|74|O|3089.42|1998-04-19|4-NOT SPECIFIED|Clerk#000000879|0|sual theodolites cajole enticingly above the furiously fin|61.6078|-149.322|61.6078|-149.322|
+2752|59|F|187932.30|1993-11-19|2-HIGH|Clerk#000000648|0| carefully regular foxes are quickly quickl|61.6131|-149.397|61.6131|-149.397|
+2753|16|F|159720.39|1993-11-30|2-HIGH|Clerk#000000380|0|ending instructions. unusual deposits|61.6648|-149.372|61.6648|-149.372|
+2754|145|F|25985.52|1994-04-03|2-HIGH|Clerk#000000960|0|cies detect slyly. |61.5531|-149.651|61.5531|-149.651|
+2755|118|F|101202.18|1992-02-07|4-NOT SPECIFIED|Clerk#000000177|0|ously according to the sly foxes. blithely regular pinto bean|61.5811|-149.45|61.5811|-149.45|
+2756|118|F|142323.38|1994-04-18|1-URGENT|Clerk#000000537|0|arefully special warho|61.583|-149.457|61.583|-149.457|
+2757|76|O|89792.48|1995-07-20|2-HIGH|Clerk#000000216|0| regular requests subl|61.1955|-149.9|61.1955|-149.9|
+2758|43|O|36671.88|1998-07-12|5-LOW|Clerk#000000863|0|s cajole according to the carefully special |61.1844|-149.897|61.1844|-149.897|
+2759|116|F|89731.10|1993-11-25|4-NOT SPECIFIED|Clerk#000000071|0|ts. regular, pending pinto beans sleep ab|61.1901|-149.892|61.1901|-149.892|
+2784|95|O|106635.21|1998-01-07|1-URGENT|Clerk#000000540|0|g deposits alongside of the silent requests s|61.1444|-149.867|61.1444|-149.867|
+2785|148|O|132854.79|1995-07-21|2-HIGH|Clerk#000000098|0|iously pending packages sleep according to the blithely unusual foxe|61.1955|-149.9|61.1955|-149.9|
+2786|79|F|178254.66|1992-03-22|2-HIGH|Clerk#000000976|0|al platelets cajole blithely ironic requests. ironic re|61.1893|-149.887|61.1893|-149.887|
+2787|103|O|3726.14|1995-09-30|1-URGENT|Clerk#000000906|0|he ironic, regular |61.2174|-149.888|61.2174|-149.888|
+2788|124|F|17172.66|1994-09-22|1-URGENT|Clerk#000000641|0|nts wake across the fluffily bold accoun|61.2227|-149.842|61.2227|-149.842|
+2789|37|O|219123.27|1998-03-14|2-HIGH|Clerk#000000972|0|gular patterns boost. carefully even re|61.1263|-149.872|61.1263|-149.872|
+2790|25|F|177458.97|1994-08-19|2-HIGH|Clerk#000000679|0| the carefully express deposits sleep slyly |61.1138|-149.866|61.1138|-149.866|
+2791|121|F|156697.55|1994-10-10|2-HIGH|Clerk#000000662|0|as. slyly ironic accounts play furiously bl|61.2157|-149.821|61.2157|-149.821|
+2816|58|F|42225.53|1994-09-20|2-HIGH|Clerk#000000289|0|kages at the final deposits cajole furious foxes. quickly |61.2174|-149.888|61.2174|-149.888|
+2817|40|F|71453.85|1994-04-19|3-MEDIUM|Clerk#000000982|0|ic foxes haggle upon the daringly even pinto beans. slyly|61.1855|-149.868|61.1855|-149.868|
+2818|49|F|120086.84|1994-12-12|3-MEDIUM|Clerk#000000413|0|eep furiously special ideas. express |61.1951|-149.873|61.1951|-149.873|
+2819|103|F|66927.16|1994-05-05|1-URGENT|Clerk#000000769|0|ngside of the blithely ironic dolphins. furio|61.1444|-149.867|61.1444|-149.867|
+2820|19|F|143813.39|1994-05-20|3-MEDIUM|Clerk#000000807|0|equests are furiously. carefu|61.1883|-149.735|61.1883|-149.735|
+2821|118|F|36592.48|1993-08-09|3-MEDIUM|Clerk#000000323|0|ng requests. even instructions are quickly express, silent instructi|61.2161|-149.876|61.2161|-149.876|
+2822|79|F|40142.15|1993-07-26|2-HIGH|Clerk#000000510|0|furiously against the accounts. unusual accounts aft|61.2161|-149.876|61.2161|-149.876|
+2823|79|O|171894.45|1995-09-09|2-HIGH|Clerk#000000567|0|encies. carefully fluffy accounts m|61.1893|-149.888|61.1893|-149.888|
+2848|70|F|116258.53|1992-03-10|1-URGENT|Clerk#000000256|0|ly fluffy foxes sleep furiously across the slyly regu|61.2174|-149.888|61.2174|-149.888|
+2849|46|O|180054.29|1996-04-30|2-HIGH|Clerk#000000659|0|al packages are after the quickly bold requests. carefully special |61.1914|-149.886|61.1914|-149.886|
+2850|100|O|122969.79|1996-10-02|2-HIGH|Clerk#000000392|0|, regular deposits. furiously pending packages hinder carefully carefully u|61.1541|-149.958|61.1541|-149.958|
+2851|145|O|7859.36|1997-09-07|5-LOW|Clerk#000000566|0|Tiresias wake quickly quickly even|61.1259|-149.717|61.1259|-149.717|
+2852|91|F|99050.81|1993-01-16|1-URGENT|Clerk#000000740|0|ruthless deposits against the final instructions use quickly al|61.2193|-149.902|61.2193|-149.902|
+2853|94|F|103641.15|1994-05-05|2-HIGH|Clerk#000000878|0|the carefully even packages.|61.1879|-149.886|61.1879|-149.886|
+2854|139|F|153568.02|1994-06-27|1-URGENT|Clerk#000000010|0| furiously ironic tithes use furiously |61.1372|-149.912|61.1372|-149.912|
+2855|49|F|48419.58|1993-04-04|4-NOT SPECIFIED|Clerk#000000973|0| silent, regular packages sleep |61.1101|-149.857|61.1101|-149.857|
+2880|8|F|145761.99|1992-03-15|2-HIGH|Clerk#000000756|0|ves maintain doggedly spec|61.1791|-149.94|61.1791|-149.94|
+2881|100|F|45695.84|1992-05-10|5-LOW|Clerk#000000864|0|uriously. slyly express requests according to the silent dol|61.2031|-149.749|61.2031|-149.749|
+2882|121|O|172872.37|1995-08-22|2-HIGH|Clerk#000000891|0|pending deposits. carefully eve|61.1914|-149.877|61.1914|-149.877|
+2883|121|F|170360.27|1995-01-23|5-LOW|Clerk#000000180|0|uses. carefully ironic accounts lose fluffil|61.1944|-149.883|61.1944|-149.883|
+2884|92|O|71683.84|1997-10-12|3-MEDIUM|Clerk#000000780|0|efully express instructions sleep against|61.1923|-149.886|61.1923|-149.886|
+2885|7|F|146896.72|1992-09-19|4-NOT SPECIFIED|Clerk#000000280|0|ly sometimes special excuses. final requests are |61.2123|-149.854|61.2123|-149.854|
+2886|109|F|94527.23|1994-11-13|4-NOT SPECIFIED|Clerk#000000619|0|uctions. ironic packages sle|61.2161|-149.876|61.2161|-149.876|
+2887|109|O|28571.39|1997-05-26|5-LOW|Clerk#000000566|0|slyly even pinto beans. slyly bold epitaphs cajole blithely above t|61.2171|-149.9|61.2171|-149.9|
+2912|94|F|27727.52|1992-03-12|5-LOW|Clerk#000000186|0|jole blithely above the quickly regular packages. carefully regular pinto bean|61.1125|-149.861|61.1125|-149.861|
+2913|43|O|130702.19|1997-07-12|3-MEDIUM|Clerk#000000118|0|mptotes doubt furiously slyly regu|61.1419|-149.896|61.1419|-149.896|
+2914|109|F|60867.14|1993-03-03|3-MEDIUM|Clerk#000000543|0|he slyly regular theodolites are furiously sile|61.145|-149.878|61.145|-149.878|
+2915|94|F|96015.13|1994-03-31|5-LOW|Clerk#000000410|0|ld packages. bold deposits boost blithely. ironic, unusual theodoli|61.1044|-149.865|61.1044|-149.865|
+2916|8|O|20182.22|1995-12-27|2-HIGH|Clerk#000000681|0|ithely blithe deposits sleep beyond the|61.1444|-149.876|61.1444|-149.876|
+2917|91|O|100714.13|1997-12-09|4-NOT SPECIFIED|Clerk#000000061|0| special dugouts among the special deposi|61.1|-149.85|61.1|-149.85|
+2918|118|O|21760.09|1996-09-08|3-MEDIUM|Clerk#000000439|0|ular deposits across th|61.1105|-149.861|61.1105|-149.861|
+2919|53|F|137223.14|1993-12-10|2-HIGH|Clerk#000000209|0|es. pearls wake quietly slyly ironic instructions--|61.1286|-149.957|61.1286|-149.957|
+2944|14|O|146581.14|1997-09-24|4-NOT SPECIFIED|Clerk#000000740|0|deas. permanently special foxes haggle carefully ab|61.1201|-149.89|61.1201|-149.89|
+2945|29|O|223507.72|1996-01-03|2-HIGH|Clerk#000000499|0|ons are carefully toward the permanent, bold pinto beans. regu|61.112|-149.871|61.112|-149.871|
+2946|125|O|102226.59|1996-02-05|5-LOW|Clerk#000000329|0|g instructions about the regular accounts sleep carefully along the pen|61.1427|-149.864|61.1427|-149.864|
+2947|70|P|43360.95|1995-04-26|1-URGENT|Clerk#000000464|0|ronic accounts. accounts run furiously d|61.1212|-149.947|61.1212|-149.947|
+2948|44|F|100758.71|1994-08-23|5-LOW|Clerk#000000701|0| deposits according to the blithely pending |61.1228|-149.939|61.1228|-149.939|
+2949|137|F|94231.71|1994-04-12|2-HIGH|Clerk#000000184|0|y ironic accounts use. quickly blithe accou|61.1093|-149.871|61.1093|-149.871|
+2950|136|O|183620.33|1997-07-06|1-URGENT|Clerk#000000833|0| dolphins around the furiously |61.145|-149.878|61.145|-149.878|
+2951|74|O|125509.17|1996-02-06|2-HIGH|Clerk#000000680|0|gular deposits above the finally regular ideas integrate idly stealthil|61.1191|-149.871|61.1191|-149.871|
+2976|29|F|145768.47|1993-12-10|4-NOT SPECIFIED|Clerk#000000159|0|. furiously ironic asymptotes haggle ruthlessly silently regular r|61.1003|-149.856|61.1003|-149.856|
+2977|73|O|25170.88|1996-08-27|3-MEDIUM|Clerk#000000252|0|quickly special platelets are furio|61.1113|-149.872|61.1113|-149.872|
+2978|44|P|139542.14|1995-05-03|1-URGENT|Clerk#000000135|0|d. even platelets are. ironic dependencies cajole slow, e|61.1084|-149.861|61.1084|-149.861|
+2979|133|O|116789.98|1996-03-23|3-MEDIUM|Clerk#000000820|0|even, ironic foxes sleep along|61.144|-149.878|61.144|-149.878|
+2980|4|O|187514.11|1996-09-14|3-MEDIUM|Clerk#000000661|0|y quick pinto beans wake. slyly re|61.1426|-149.877|61.1426|-149.877|
+2981|49|O|37776.79|1998-07-29|5-LOW|Clerk#000000299|0|hely among the express foxes. blithely stealthy requests cajole boldly. regu|61.1173|-149.861|61.1173|-149.861|
+2982|85|F|55582.94|1995-03-19|2-HIGH|Clerk#000000402|0|lyly. express theodolites affix slyly after the slyly speci|61.1347|-149.914|61.1347|-149.914|
diff --git a/hyracks-examples/hyracks-integration-tests/data/spatial.txt b/hyracks-examples/hyracks-integration-tests/data/spatial.txt
new file mode 100644
index 0000000..831e702
--- /dev/null
+++ b/hyracks-examples/hyracks-integration-tests/data/spatial.txt
@@ -0,0 +1,1079 @@
+42.3631|-71.065|42.3631|-71.065|all phase plumbing|
+42.1091|-70.696|42.1091|-70.696|gramercy cardiac diagnostic|
+40.8151|-73.0452|40.8151|-73.0452|-|
+40.8151|-73.0452|40.8151|-73.0452|bear mountain state park|
+42.2481|-71.174|42.2481|-71.174|domino's pizza|
+61.1201|-149.89|61.1201|-149.89|paul fiori|
+61.1501|-149.926|61.1501|-149.926|fire & flood svc|
+61.181|-149.814|61.181|-149.814|fire dept-hazardous materials|
+61.1517|-149.86|61.1517|-149.86|fire fighters smoke detector|
+61.1806|-149.814|61.1806|-149.814|fire marshal's office|
+61.1806|-149.775|61.1806|-149.775|fire marshall director|
+61.1806|-149.775|61.1806|-149.775|fire prevention div|
+61.1806|-149.775|61.1806|-149.775|fire service training|
+61.2113|-149.824|61.2113|-149.824|fire service training program|
+61.1967|-149.877|61.1967|-149.877|fireman's fund insurance|
+61.2164|-149.892|61.2164|-149.892|firenze design|
+61.1571|-149.883|61.1571|-149.883|fireplace gallery|
+61.2048|-149.834|61.2048|-149.834|firestar media svc|
+61.0956|-149.843|61.0956|-149.843|firestarters of alaska|
+61.1491|-149.809|61.1491|-149.809|firestoppers|
+61.1981|-149.871|61.1981|-149.871|fireweed 7|
+61.1982|-149.876|61.1982|-149.876|fireweed acupuncture herb|
+61.1924|-149.909|61.1924|-149.909|fireweed health care inc|
+61.2204|-149.728|61.2204|-149.728|fireweed lodge at lake creek|
+61.196|-149.864|61.196|-149.864|fireweed manor|
+61.1987|-149.889|61.1987|-149.889|fireweed mart|
+61.1984|-149.897|61.1984|-149.897|fireweed shoe repair|
+61.1897|-149.898|61.1897|-149.898|fireweed tuxedo|
+61.1228|-149.81|61.1228|-149.81|first & lasting impressions|
+61.1649|-149.881|61.1649|-149.881|first alaska contractors inc|
+61.1934|-149.887|61.1934|-149.887|first alaska mortgage|
+61.2164|-149.892|61.2164|-149.892|first alaskans institute|
+61.1932|-149.886|61.1932|-149.886|first american title ins co|
+61.2072|-149.888|61.2072|-149.888|first assembly of god church|
+61.2125|-149.904|61.2125|-149.904|first baptist church|
+61.2142|-149.806|61.2142|-149.806|first call media productions|
+61.1866|-149.923|61.1866|-149.923|first care medical ctrs|
+61.1089|-149.857|61.1089|-149.857|first choice realty & property|
+61.1024|-149.853|61.1024|-149.853|first choice roofing|
+61.1891|-149.906|61.1891|-149.906|do-r-best forestry|
+61.1891|-149.906|61.1891|-149.906|painting by english|
+61.1891|-149.906|61.1891|-149.906|houston painting co|
+61.1891|-149.906|61.1891|-149.906|berts mechnaical|
+61.1891|-149.906|61.1891|-149.906|industrial boiler & controls|
+61.1891|-149.906|61.1891|-149.906|foremans inc|
+61.1891|-149.906|61.1891|-149.906|alcan electrical & engineering|
+61.1891|-149.906|61.1891|-149.906|denali electric co|
+61.1891|-149.906|61.1891|-149.906|alaska infrred asp restoration|
+61.1891|-149.906|61.1891|-149.906|arnell icm company|
+61.1891|-149.906|61.1891|-149.906|pacific partition systems|
+61.1891|-149.906|61.1891|-149.906|associated professional entps|
+61.1891|-149.906|61.1891|-149.906|alcan general inc|
+61.1891|-149.906|61.1891|-149.906|asrc constructors inc|
+61.1891|-149.906|61.1891|-149.906|john-wayne construction co|
+61.1891|-149.906|61.1891|-149.906|underfoot enterprises|
+61.1891|-149.906|61.1891|-149.906|advance door systems inc|
+61.1891|-149.906|61.1891|-149.906|doorman service company|
+61.1891|-149.906|61.1891|-149.906|rdd asphalt restorations|
+61.1891|-149.906|61.1891|-149.906|l k general contractor|
+61.1891|-149.906|61.1891|-149.906|wiker roofing co|
+61.1891|-149.906|61.1891|-149.906|damco paving corp|
+61.1891|-149.906|61.1891|-149.906|international steel erectors|
+61.1891|-149.906|61.1891|-149.906|falcon masonry|
+61.1891|-149.906|61.1891|-149.906|wasler specialties|
+61.1891|-149.906|61.1891|-149.906|northeaton plastering|
+61.1891|-149.906|61.1891|-149.906|a1 fence llc|
+61.1891|-149.906|61.1891|-149.906|hickel construction & engrg|
+61.1891|-149.906|61.1891|-149.906|taco-loco products inc|
+61.1891|-149.906|61.1891|-149.906|je jones llc|
+61.1891|-149.906|61.1891|-149.906|alaska yellowpagescom inc|
+61.1891|-149.906|61.1891|-149.906|alaska tab & bind inc|
+61.1891|-149.906|61.1891|-149.906|sourdough studio|
+61.1891|-149.906|61.1891|-149.906|tundra times|
+61.1891|-149.906|61.1891|-149.906|travelhost of alaska|
+64.8541|-147.813|64.8541|-147.813|us land management bureau|
+64.8414|-147.606|64.8414|-147.606|us land management bureau|
+64.8371|-147.746|64.8371|-147.746|us law enforcement svc|
+64.8151|-147.707|64.8151|-147.707|us marshal|
+64.8371|-147.746|64.8371|-147.746|us migratory bird management|
+64.849|-147.813|64.849|-147.813|us mineral resources program|
+64.8425|-147.724|64.8425|-147.724|us passport agency|
+64.8425|-147.724|64.8425|-147.724|us post office|
+64.849|-147.826|64.849|-147.826|us post office|
+64.815|-147.882|64.815|-147.882|us post office|
+64.8906|-147.628|64.8906|-147.628|us post office|
+64.8276|-147.639|64.8276|-147.639|us post office|
+64.8461|-147.813|64.8461|-147.813|us rural development|
+64.8281|-147.812|64.8281|-147.812|us transportation dept|
+64.8377|-147.718|64.8377|-147.718|us travel|
+64.8417|-147.718|64.8417|-147.718|us vocational rehabilitation|
+64.8145|-147.772|64.8145|-147.772|us water resources|
+64.8541|-147.813|64.8541|-147.813|us water resources div|
+64.8169|-147.779|64.8169|-147.779|usa today|
+64.8378|-147.71|64.8378|-147.71|usgsa public building svc|
+64.8436|-147.722|64.8436|-147.722|usibelli coal mine inc|
+64.9401|-147.402|64.9401|-147.402|usibelli coal mine kbc|
+64.8426|-147.719|64.8426|-147.719|uskh architects engineers|
+64.8534|-147.811|64.8534|-147.811|utility services of alaska|
+64.9341|-147.928|64.9341|-147.928|v f grace inc|
+64.8393|-147.72|64.8393|-147.72|vagabond travel|
+64.8406|-147.731|64.8406|-147.731|valerie m therrien atty-law pc|
+64.9281|-147.865|64.9281|-147.865|vallata|
+64.8371|-147.716|64.8371|-147.716|valley kids|
+64.9414|-147.841|64.9414|-147.841|valley landscaping|
+64.8363|-147.79|64.8363|-147.79|valley of blessing church|
+64.8371|-147.716|64.8371|-147.716|valley ventures|
+64.8591|-147.917|64.8591|-147.917|valley visions consulting inc|
+64.8363|-147.79|64.8363|-147.79|value village|
+55.3801|-131.682|55.3801|-131.682|wolfs concrete finishing|
+55.3073|-131.528|55.3073|-131.528|whiteside enterprises|
+55.3801|-131.682|55.3801|-131.682|flooring services|
+55.3601|-131.681|55.3601|-131.681|floor decor and more|
+55.3279|-131.613|55.3279|-131.613|ohmers renderings|
+55.3129|-131.588|55.3129|-131.588|carson construction|
+55.3801|-131.682|55.3801|-131.682|a a a pest pros|
+55.3801|-131.682|55.3801|-131.682|aurora roofing|
+55.4299|-131.789|55.4299|-131.789|channel seamless gutter roofg|
+55.3801|-131.682|55.3801|-131.682|variety painting & roofing|
+55.3507|-131.671|55.3507|-131.671|hometown flooring|
+55.3801|-131.682|55.3801|-131.682|southeast carpets & draperies|
+55.3394|-131.636|55.3394|-131.636|channel electric inc|
+55.3801|-131.682|55.3801|-131.682|ever electric inc|
+55.3103|-131.582|55.3103|-131.582|jacks painting|
+55.3801|-131.682|55.3801|-131.682|larry d painter|
+55.3801|-131.682|55.3801|-131.682|trans alaska restaurant eqp|
+55.3801|-131.682|55.3801|-131.682|jacobson heating & rfrgn|
+55.3801|-131.682|55.3801|-131.682|ketchikan mechanical inc|
+55.3801|-131.682|55.3801|-131.682|southast fire prtection contrs|
+55.3424|-131.634|55.3424|-131.634|bob hulse plumbing inc|
+55.4097|-131.729|55.4097|-131.729|southeast construction inc|
+55.3801|-131.682|55.3801|-131.682|southeast backhoe service inc|
+55.3801|-131.682|55.3801|-131.682|ljs & sons|
+55.3462|-131.658|55.3462|-131.658|captains quarters & breakfast|
+55.3801|-131.682|55.3801|-131.682|pacific log & lumber ltd|
+55.3801|-131.682|55.3801|-131.682|phoenix logging co a partnr|
+55.3801|-131.682|55.3801|-131.682|ketchikan cutting co|
+55.3801|-131.682|55.3801|-131.682|gildersleeve logging co inc|
+55.3801|-131.682|55.3801|-131.682|northwest logging inc|
+55.3801|-131.682|55.3801|-131.682|evergreen timber llc|
+55.3801|-131.682|55.3801|-131.682|helicopter services inc|
+55.3801|-131.682|55.3801|-131.682|miller inc|
+61.5856|-149.316|61.5856|-149.316|alaska water conditioning inc|
+61.5781|-149.429|61.5781|-149.429|alaska waterski ctr|
+61.6141|-149.457|61.6141|-149.457|alaska weld|
+61.5801|-149.461|61.5801|-149.461|alaska well & pump|
+61.9071|-150.067|61.9071|-150.067|alaska winter excursions|
+61.5928|-149.392|61.5928|-149.392|alaska woodworks|
+61.6141|-149.457|61.6141|-149.457|alaska wool products|
+61.5792|-149.36|61.5792|-149.36|alaska's best satellite|
+61.6049|-149.463|61.6049|-149.463|alaska's cabin fever relief|
+61.6099|-149.328|61.6099|-149.328|alaska's nautical training|
+61.5531|-149.651|61.5531|-149.651|alaska's trails & tails|
+61.6141|-149.457|61.6141|-149.457|alaskamls.com|
+61.6141|-149.457|61.6141|-149.457|alaskan bail bonds|
+61.5531|-149.651|61.5531|-149.651|alaskan colors|
+61.6141|-149.457|61.6141|-149.457|alaskan distributors|
+61.7321|-150.12|61.7321|-150.12|alaskan dream espresso|
+61.6141|-149.457|61.6141|-149.457|alaskan electrical svc|
+61.582|-149.441|61.582|-149.441|alaskan family dental ctr|
+61.5818|-149.44|61.5818|-149.44|alaskan gold rush jewelers|
+61.6141|-149.457|61.6141|-149.457|alaskan hospitality baskets|
+61.7321|-150.12|61.7321|-150.12|alaskan host bed & breakfast|
+61.6308|-149.415|61.6308|-149.415|alaskan industries|
+61.5801|-149.461|61.5801|-149.461|alaskan marine documentation|
+61.5811|-149.444|61.5811|-149.444|alaskan musher|
+61.6521|-149.92|61.6521|-149.92|alaskan sled dogs|
+61.6141|-149.307|61.6141|-149.307|alaskan splendor b & b limo|
+61.6168|-149.328|61.6168|-149.328|alaskan tnt excavation|
+61.5714|-149.381|61.5714|-149.381|alaskan view motel|
+61.6521|-149.92|61.6521|-149.92|alaskian custom expeditions|
+61.5858|-149.376|61.5858|-149.376|alcan electric inc|
+61.5822|-149.463|61.5822|-149.463|all about herbs|
+61.5861|-149.303|61.5861|-149.303|all creatures veterinary clnc|
+61.5821|-149.438|61.5821|-149.438|all i saw cookware|
+61.6281|-149.338|61.6281|-149.338|all phase roofing|
+61.5771|-149.335|61.5771|-149.335|all star child care|
+61.5917|-149.464|61.5917|-149.464|all star construction|
+61.1927|-149.86|61.1927|-149.86|first christian church|
+61.1879|-149.85|61.1879|-149.85|first christian meth episc chr|
+61.1814|-149.849|61.1814|-149.849|first church of god|
+61.2104|-149.892|61.2104|-149.892|first church of the nazarene|
+61.2093|-149.903|61.2093|-149.903|first church-christ scientist|
+61.1481|-149.829|61.1481|-149.829|first class auto body|
+61.2161|-149.876|61.2161|-149.876|first class painting|
+61.1972|-149.75|61.1972|-149.75|first cme church|
+61.2066|-149.887|61.2066|-149.887|first command financial plan|
+61.2161|-149.876|61.2161|-149.876|first commodity traders/maxxis|
+61.195|-149.834|61.195|-149.834|first congregational church|
+61.2038|-149.808|61.2038|-149.808|first health svc corp|
+61.1228|-149.862|61.1228|-149.862|first interstate bank-alaska|
+61.2005|-149.785|61.2005|-149.785|first korean baptist church|
+61.181|-149.825|61.181|-149.825|first lms samoan church|
+61.1867|-149.919|61.1867|-149.919|first maintenance co|
+61.1955|-149.911|61.1955|-149.911|first maintenance inc|
+61.1089|-149.858|61.1089|-149.858|first medical ctr|
+61.1805|-149.889|61.1805|-149.889|first mental health|
+61.1534|-149.985|61.1534|-149.985|first missionary baptist chr|
+61.1901|-149.911|61.1901|-149.911|first mortgage inc|
+61.2122|-149.734|61.2122|-149.734|first national bank|
+61.1951|-149.906|61.1951|-149.906|first national bank|
+61.2043|-149.869|61.2043|-149.869|first national bank|
+61.2036|-149.869|61.2036|-149.869|first national bank|
+61.1883|-149.886|61.1883|-149.886|first national bank|
+61.2141|-149.864|61.2141|-149.864|first national bank|
+61.1955|-149.782|61.1955|-149.782|first national bank|
+61.2183|-149.894|61.2183|-149.894|first national bank|
+61.1417|-149.864|61.1417|-149.864|first national bank|
+61.1883|-149.883|61.1883|-149.883|first national bank alaska|
+61.2161|-149.876|61.2161|-149.876|first national bank jobline|
+61.2193|-149.869|61.2193|-149.869|first native baptist church|
+61.2191|-149.888|61.2191|-149.888|seward phoenix log|
+61.2191|-149.888|61.2191|-149.888|greatland graphics|
+61.2191|-149.888|61.2191|-149.888|conocophillips alaska inc|
+61.2191|-149.888|61.2191|-149.888|tonys enterprises|
+61.2191|-149.888|61.2191|-149.888|counter tops by rejio|
+61.2191|-149.888|61.2191|-149.888|fletchers masonry|
+61.2191|-149.888|61.2191|-149.888|anchor roofing construction|
+61.2191|-149.888|61.2191|-149.888|seamless flooring systems|
+61.2191|-149.888|61.2191|-149.888|edt security service|
+61.2191|-149.888|61.2191|-149.888|rock creek mining company|
+61.2191|-149.888|61.2191|-149.888|fairweather incorporated|
+61.2191|-149.888|61.2191|-149.888|swalling construction co ofc|
+61.1891|-149.906|61.1891|-149.906|posh house|
+61.2191|-149.888|61.2191|-149.888|freespan system inc|
+61.2191|-149.888|61.2191|-149.888|swank construction|
+61.1101|-149.857|61.1101|-149.857|guitreaus technical|
+61.1101|-149.857|61.1101|-149.857|casa llc|
+61.1101|-149.857|61.1101|-149.857|b & a mobile mix concrete|
+61.1101|-149.857|61.1101|-149.857|glacier chain supplies inc|
+61.1101|-149.857|61.1101|-149.857|jennies workshop inc|
+61.1101|-149.857|61.1101|-149.857|aaa american sign and lighting|
+61.1101|-149.857|61.1101|-149.857|patrick faherty|
+61.1101|-149.857|61.1101|-149.857|tom mortensen & assoc|
+61.1101|-149.857|61.1101|-149.857|bethel services inc|
+61.1101|-149.857|61.1101|-149.857|denali general contractors|
+61.1101|-149.857|61.1101|-149.857|h watt & scott inc|
+61.1101|-149.857|61.1101|-149.857|premier cnstr surveys llc|
+61.1101|-149.857|61.1101|-149.857|johnson construction|
+61.1101|-149.857|61.1101|-149.857|thomas development inc|
+61.1101|-149.857|61.1101|-149.857|t & m construction|
+61.1101|-149.857|61.1101|-149.857|rockford corporation|
+61.1101|-149.857|61.1101|-149.857|tam construction inc|
+61.1101|-149.857|61.1101|-149.857|crown pointe inc|
+61.1101|-149.857|61.1101|-149.857|arctic residential res corp|
+64.8591|-147.917|64.8591|-147.917|van wyhe rogers|
+64.8273|-147.715|64.8273|-147.715|variety motors|
+64.8281|-147.715|64.8281|-147.715|vector marketing corp|
+64.8552|-147.763|64.8552|-147.763|vend-alaska|
+64.8481|-147.684|64.8481|-147.684|verizon information svc|
+64.8522|-147.773|64.8522|-147.773|vertex insulation|
+64.8467|-147.703|64.8467|-147.703|veterans of foreign wars|
+64.8963|-147.662|64.8963|-147.662|vicki rayburn music|
+64.8372|-147.796|64.8372|-147.796|victorian parlor antiques|
+64.8312|-147.716|64.8312|-147.716|victory christian faith ctr|
+64.811|-147.71|64.811|-147.71|video machine inc|
+64.8971|-147.663|64.8971|-147.663|village mobile home park|
+64.85|-147.699|64.85|-147.699|vip dry cleaners & lndrmt|
+64.8389|-147.743|64.8389|-147.743|virgie's sari-sari|
+64.781|-148|64.781|-148|vision quest|
+64.8377|-147.718|64.8377|-147.718|vista travel inc|
+64.8248|-147.886|64.8248|-147.886|vital signs|
+64.818|-147.679|64.818|-147.679|vocational rehabilitation dept|
+64.8271|-147.79|64.8271|-147.79|voluntary taxes|
+64.8451|-147.812|64.8451|-147.812|volunteer action ctr|
+64.8311|-147.729|64.8311|-147.729|volunteers in policing|
+64.8454|-147.855|64.8454|-147.855|voorhees concrete cutting|
+64.8302|-147.744|64.8302|-147.744|george r vrablik md|
+64.8113|-147.91|64.8113|-147.91|w a inc|
+64.8271|-147.789|64.8271|-147.789|wagner excavating|
+64.8451|-147.698|64.8451|-147.698|wakefield builders|
+64.8586|-147.69|64.8586|-147.69|wal-mart supercenter|
+64.8507|-147.702|64.8507|-147.702|waldenbooks|
+64.8371|-147.716|64.8371|-147.716|wales tec inc|
+64.8449|-147.743|64.8449|-147.743|walkada apartments|
+64.8475|-147.706|64.8475|-147.706|wallace wallace & wakefield|
+64.8452|-147.714|64.8452|-147.714|walleri law offices|
+64.8125|-147.787|64.8125|-147.787|wallner excavating inc|
+55.3801|-131.682|55.3801|-131.682|usa today|
+55.3801|-131.682|55.3801|-131.682|sun glass designs|
+55.3801|-131.682|55.3801|-131.682|austin powder company|
+55.3801|-131.682|55.3801|-131.682|tongass substance screening|
+55.3801|-131.682|55.3801|-131.682|royalty co|
+55.3801|-131.682|55.3801|-131.682|rons plumbing & heating|
+55.3599|-131.687|55.3599|-131.687|candle lady|
+55.4381|-131.803|55.4381|-131.803|fluke signs north|
+55.3751|-131.718|55.3751|-131.718|sign pro of juneau|
+55.3421|-131.641|55.3421|-131.641|blue heron designs|
+55.3408|-131.64|55.3408|-131.64|gateway games|
+55.2978|-131.534|55.2978|-131.534|nathan jackson|
+55.3801|-131.682|55.3801|-131.682|alaska ship & drydock inc|
+55.3801|-131.682|55.3801|-131.682|don cunning|
+55.3421|-131.622|55.3421|-131.622|aero services|
+55.3801|-131.682|55.3801|-131.682|hanger|
+55.3801|-131.682|55.3801|-131.682|columbia helicopters inc|
+55.3801|-131.682|55.3801|-131.682|homestead skiffs llc|
+55.3267|-131.523|55.3267|-131.523|n picific tecn inc|
+55.3522|-131.685|55.3522|-131.685|ketchikan ranger district|
+55.5351|-133.014|55.5351|-133.014|b-3 contractors inc|
+55.5351|-133.014|55.5351|-133.014|phoenix logging company|
+55.5351|-133.014|55.5351|-133.014|prince wales electric and r|
+55.5351|-133.014|55.5351|-133.014|toms burner service|
+55.5351|-133.014|55.5351|-133.014|p & h construction|
+55.4691|-132.855|55.4691|-132.855|ziegler construction|
+55.5511|-133.081|55.5511|-133.081|rainbird drilling inc|
+55.5351|-133.014|55.5351|-133.014|larry lee trumble|
+55.5531|-133.097|55.5531|-133.097|maria elena f/v|
+55.5351|-133.014|55.5351|-133.014|wildfish co|
+55.4726|-131.793|55.4726|-131.793|carson marine charters inc|
+55.3801|-131.682|55.3801|-131.682|alaska general seafood|
+55.3801|-131.682|55.3801|-131.682|norquest seafoods inc|
+55.3801|-131.682|55.3801|-131.682|ocean point fisheries|
+61.5745|-149.562|61.5745|-149.562|all things wild taxidermy|
+61.7321|-150.12|61.7321|-150.12|all valley svc|
+61.5421|-149.419|61.5421|-149.419|all ways storage & rental co|
+61.5722|-149.702|61.5722|-149.702|all-n-1 svc|
+61.5743|-149.405|61.5743|-149.405|allen & petersen appliance ctr|
+61.578|-149.441|61.578|-149.441|allied communications|
+61.5826|-149.427|61.5826|-149.427|aloha dental care|
+61.5586|-149.351|61.5586|-149.351|alpenview medical svc|
+61.5844|-149.442|61.5844|-149.442|alpha counseling & education|
+61.5817|-149.472|61.5817|-149.472|alpine chiropractic ctr|
+61.6141|-149.457|61.6141|-149.457|alpine pure air & water prdcts|
+61.5761|-149.602|61.5761|-149.602|alpine septic pumping inc|
+61.6141|-149.457|61.6141|-149.457|alpine technologies|
+61.58|-149.434|61.58|-149.434|aluminum works|
+61.6002|-149.429|61.6002|-149.429|alyeska technologies a tech|
+61.569|-149.347|61.569|-149.347|ambassador window cleaning|
+61.5812|-149.448|61.5812|-149.448|america's lending group|
+61.6141|-149.457|61.6141|-149.457|american dream construction|
+61.6141|-149.457|61.6141|-149.457|american legion|
+61.6141|-149.457|61.6141|-149.457|american legion bingo|
+61.5969|-149.367|61.5969|-149.367|american marine hyperbarics|
+61.58|-149.4|61.58|-149.4|american park & sell|
+61.5848|-149.445|61.5848|-149.445|american red cross|
+61.5811|-149.444|61.5811|-149.444|american satellite|
+61.5733|-149.389|61.5733|-149.389|american tattoo|
+61.6141|-149.457|61.6141|-149.457|amigalaska productions|
+61.5952|-149.436|61.5952|-149.436|amvets post|
+61.5421|-149.419|61.5421|-149.419|amway distributors|
+61.5783|-149.362|61.5783|-149.362|amy henry's heated storage|
+61.7321|-150.12|61.7321|-150.12|anchor chiropractic healing|
+61.5691|-149.328|61.5691|-149.328|anchorage daily news|
+61.6141|-149.457|61.6141|-149.457|anchorage drywall|
+61.5819|-149.3|61.5819|-149.3|anchorage handyman svc|
+61.6431|-149.289|61.6431|-149.289|anchorage tank & welding|
+61.6431|-149.292|61.6431|-149.292|anchorage tank & welding inc|
+61.6228|-149.313|61.6228|-149.313|anchors away cruise outlet|
+61.2125|-149.894|61.2125|-149.894|first presbyterian church|
+61.1879|-149.886|61.1879|-149.886|first priority mortgage|
+61.1594|-149.835|61.1594|-149.835|first samoan church|
+61.2123|-149.854|61.2123|-149.854|first samoan stand by faith|
+61.1951|-149.945|61.1951|-149.945|first samoan united methodist|
+61.2138|-149.856|61.2138|-149.856|first spanish assemblies-god|
+61.1255|-149.864|61.1255|-149.864|first student|
+61.2066|-149.808|61.2066|-149.808|first tabernacle|
+61.2138|-149.896|61.2138|-149.896|first united methodist church|
+61.2161|-149.876|61.2161|-149.876|fish|
+61.1594|-149.888|61.1594|-149.888|fish & game dept|
+61.1806|-149.775|61.1806|-149.775|fish & wildlife protection|
+61.1101|-149.857|61.1101|-149.857|fish alaska magazine|
+61.1541|-149.958|61.1541|-149.958|fish site|
+61.1268|-149.947|61.1268|-149.947|fish talk consulting|
+61.0931|-149.785|61.0931|-149.785|fish wish charters|
+61.1901|-149.892|61.1901|-149.892|fishboy outfitters|
+61.2183|-149.889|61.2183|-149.889|fisherman's express|
+61.2164|-149.882|61.2164|-149.882|fishing lodge inc|
+61.2161|-149.876|61.2161|-149.876|fit for health|
+61.219|-149.792|61.219|-149.792|fitzgerald photography|
+61.2188|-149.892|61.2188|-149.892|five fifteen club|
+61.2143|-149.837|61.2143|-149.837|five one victor|
+61.1608|-149.835|61.1608|-149.835|five star automotive repair|
+61.1886|-149.944|61.1886|-149.944|five star entertainment|
+61.1201|-149.89|61.1201|-149.89|five star insultators inc|
+61.1201|-149.89|61.1201|-149.89|five star roofing|
+61.1585|-149.872|61.1585|-149.872|fivestarappliance & electronic|
+61.195|-149.892|61.195|-149.892|flash tan|
+61.0895|-149.694|61.0895|-149.694|flattop technical svc|
+61.2201|-149.831|61.2201|-149.831|fleenor paper co|
+61.1663|-149.867|61.1663|-149.867|fleet liquidators|
+61.2138|-149.906|61.2138|-149.906|hugh w fleischer|
+61.2193|-149.902|61.2193|-149.902|hugh w fleischer|
+61.1201|-149.89|61.1201|-149.89|fletcher's masonry|
+61.1601|-149.984|61.1601|-149.984|flight alaska inc|
+61.1101|-149.857|61.1101|-149.857|bn contracting inc|
+61.1101|-149.857|61.1101|-149.857|twa home repair service llc|
+61.1101|-149.857|61.1101|-149.857|faltz deck & patio builders|
+61.1101|-149.857|61.1101|-149.857|four seasons|
+61.1101|-149.857|61.1101|-149.857|timberline homes inc|
+61.1101|-149.857|61.1101|-149.857|janigo construction|
+61.1101|-149.857|61.1101|-149.857|kopitch construction|
+61.1101|-149.857|61.1101|-149.857|north slopes inspection dist|
+61.1101|-149.857|61.1101|-149.857|tuboscoope|
+61.1101|-149.857|61.1101|-149.857|parker drilling company|
+61.1101|-149.857|61.1101|-149.857|record center|
+61.1101|-149.857|61.1101|-149.857|julian creek mining|
+61.1101|-149.857|61.1101|-149.857|jon|
+61.1101|-149.857|61.1101|-149.857|salmon express|
+61.1101|-149.857|61.1101|-149.857|interceptor owners fv mu|
+61.1101|-149.857|61.1101|-149.857|alaska deep sea adventure|
+61.1101|-149.857|61.1101|-149.857|northern greens|
+61.1101|-149.857|61.1101|-149.857|arctic green llc|
+61.1101|-149.857|61.1101|-149.857|alpine tree & brush cutng svcs|
+61.1101|-149.857|61.1101|-149.857|landscape resources corp|
+61.1101|-149.857|61.1101|-149.857|smith cable systems inc|
+61.1101|-149.857|61.1101|-149.857|t a b electric inc|
+61.1101|-149.857|61.1101|-149.857|bomax inc|
+61.1101|-149.857|61.1101|-149.857|arctic lights electric inc|
+61.1101|-149.857|61.1101|-149.857|rams horn electric inc|
+61.1101|-149.857|61.1101|-149.857|toklat electric inc|
+61.1101|-149.857|61.1101|-149.857|cassidys plumbing & htg llc|
+61.1101|-149.857|61.1101|-149.857|aarow pump & well service|
+61.1101|-149.857|61.1101|-149.857|aurora refrigeration inc|
+61.1101|-149.857|61.1101|-149.857|denali fire protection|
+61.1101|-149.857|61.1101|-149.857|noble mechanical|
+61.1101|-149.857|61.1101|-149.857|ancroge roofing|
+61.1101|-149.857|61.1101|-149.857|premier roofing co|
+61.1101|-149.857|61.1101|-149.857|first choice roofing|
+61.1101|-149.857|61.1101|-149.857|twin peaks roofing|
+64.8459|-147.759|64.8459|-147.759|wallpaper warehouse|
+64.8426|-147.725|64.8426|-147.725|walsh kelliher & sharp|
+64.9401|-147.402|64.9401|-147.402|laurie walton|
+64.9401|-147.402|64.9401|-147.402|lonnie ward|
+64.8331|-147.647|64.8331|-147.647|wareforce inc|
+64.8371|-147.716|64.8371|-147.716|sheree warner|
+64.8574|-147.759|64.8574|-147.759|wartes floor covering|
+64.836|-147.727|64.836|-147.727|warwick & schikora|
+64.8132|-147.76|64.8132|-147.76|waste management-star|
+64.8451|-147.96|64.8451|-147.96|water wagon llc|
+64.8891|-147.851|64.8891|-147.851|watercolors by nikki kinne|
+64.8151|-147.707|64.8151|-147.707|waterman|
+64.8363|-147.803|64.8363|-147.803|wayland baptist university|
+64.8298|-147.611|64.8298|-147.611|wayland baptist university|
+64.8989|-147.701|64.8989|-147.701|wayne's air supply|
+64.8371|-147.716|64.8371|-147.716|wdtm|
+64.8147|-147.706|64.8147|-147.706|weaver brothers inc|
+64.8451|-147.812|64.8451|-147.812|web weavers|
+64.8386|-147.788|64.8386|-147.788|weber stump grinding|
+64.8372|-147.768|64.8372|-147.768|mark weber|
+64.8151|-147.707|64.8151|-147.707|webmasters|
+64.843|-147.722|64.843|-147.722|pamela w webster|
+64.8426|-147.725|64.8426|-147.725|wedbush morgan securities|
+64.8615|-147.723|64.8615|-147.723|wedgewood resort|
+64.8694|-147.067|64.8694|-147.067|weidner electric rebuilders|
+64.8404|-147.724|64.8404|-147.724|weights & measures dept|
+64.9686|-147.577|64.9686|-147.577|welding shop|
+64.8497|-147.732|64.8497|-147.732|well street art co|
+64.8861|-147.587|64.8861|-147.587|weller elementary school|
+64.8508|-147.703|64.8508|-147.703|wells fargo bank|
+64.841|-147.72|64.841|-147.72|wells fargo bank|
+64.8375|-147.721|64.8375|-147.721|wells fargo bank|
+64.849|-147.813|64.849|-147.813|wells fargo bank|
+55.3801|-131.682|55.3801|-131.682|rainbird drilling inc|
+55.3801|-131.682|55.3801|-131.682|trident seafoods corporation|
+55.3603|-131.702|55.3603|-131.702|hals equipment & supplies|
+55.7511|-132.865|55.7511|-132.865|southern se reg aqucltre assn|
+55.7511|-132.865|55.7511|-132.865|miguel n marin|
+60.3311|-151.284|60.3311|-151.284|heath r duncan|
+60.3311|-151.284|60.3311|-151.284|coho partnership|
+60.3311|-151.284|60.3311|-151.284|tarma grooming|
+60.4341|-151.283|60.4341|-151.283|whisper wind farm & stables|
+60.3311|-151.284|60.3311|-151.284|coopers wunded bear farms kenl|
+60.3311|-151.284|60.3311|-151.284|webb fish co|
+60.3311|-151.284|60.3311|-151.284|moerliens general contractor|
+60.3311|-151.284|60.3311|-151.284|rozak excavating and cnstr|
+60.3311|-151.284|60.3311|-151.284|sundance construction company|
+60.4311|-151.286|60.4311|-151.286|online systems|
+60.3311|-151.284|60.3311|-151.284|matlock ltd|
+60.3311|-151.284|60.3311|-151.284|silver fox slippers|
+60.3311|-151.284|60.3311|-151.284|salamander pads incorporated|
+60.3311|-151.284|60.3311|-151.284|gates construction inc|
+60.3191|-151.296|60.3191|-151.296|smileys custom contracting|
+60.3311|-151.284|60.3311|-151.284|kasilof construction company|
+60.3311|-151.284|60.3311|-151.284|earth works|
+60.3311|-151.284|60.3311|-151.284|salamander exhaust wraps inc|
+60.5563|-151.241|60.5563|-151.241|olgas jewelry|
+60.6331|-151.163|60.6331|-151.163|bj services company|
+60.6331|-151.163|60.6331|-151.163|national oilwell|
+60.6331|-151.163|60.6331|-151.163|industrial water treatment|
+60.6331|-151.163|60.6331|-151.163|h p parts & machines|
+60.5551|-151.245|60.5551|-151.245|satech|
+60.6331|-151.163|60.6331|-151.163|artic instilation & coating|
+60.5506|-151.141|60.5506|-151.141|beaver creek docks inc|
+60.5681|-151.281|60.5681|-151.281|porta blast and industrial|
+60.5465|-151.147|60.5465|-151.147|nicks ironworks|
+60.6331|-151.163|60.6331|-151.163|metal magic|
+60.5578|-151.116|60.5578|-151.116|cook inlet stone products|
+61.6182|-149.385|61.6182|-149.385|anderson door|
+61.6141|-149.457|61.6141|-149.457|angel haven lodge|
+61.5765|-149.407|61.5765|-149.407|angel nails|
+61.6091|-149.77|61.6091|-149.77|animal food warehouse|
+61.5821|-149.438|61.5821|-149.438|annabells bargain books|
+61.6183|-149.373|61.6183|-149.373|antonovich contracting|
+61.6131|-149.403|61.6131|-149.403|apd enterprise|
+61.6352|-149.265|61.6352|-149.265|apex surveying & consulting|
+61.5751|-149.645|61.5751|-149.645|apostolic worship ctr|
+61.6168|-149.374|61.6168|-149.374|appraisal co of mat-su|
+61.5939|-149.43|61.5939|-149.43|aqua vac|
+61.5792|-149.495|61.5792|-149.495|arby's|
+61.6141|-149.457|61.6141|-149.457|archibald drilling svc|
+61.5829|-149.448|61.5829|-149.448|architects alaska|
+61.5938|-149.387|61.5938|-149.387|arctic builders source inc|
+61.5849|-149.38|61.5849|-149.38|arctic circle club of n amer|
+61.5729|-149.389|61.5729|-149.389|arctic custom guns|
+61.5823|-149.462|61.5823|-149.462|arctic flame|
+61.57|-149.331|61.57|-149.331|arctic fox taxidermy studio|
+61.5821|-149.438|61.5821|-149.438|arctic rose fine fashions|
+61.6141|-149.457|61.6141|-149.457|arctic siding & supply|
+61.6115|-149.331|61.6115|-149.331|arctic sun distributors|
+61.6091|-149.77|61.6091|-149.77|arctic transmissions|
+61.6141|-149.457|61.6141|-149.457|walter r armstrong dds|
+61.5826|-149.427|61.5826|-149.427|armured usa inc|
+61.5976|-149.366|61.5976|-149.366|army national guard|
+61.5976|-149.366|61.5976|-149.366|army national guard recruiting|
+61.5531|-149.651|61.5531|-149.651|arnold gagnon contracting|
+61.5531|-149.651|61.5531|-149.651|arrowhead environmental svc|
+61.5883|-149.456|61.5883|-149.456|artistry of movement|
+61.6249|-149.435|61.6249|-149.435|ask educational computer|
+61.6141|-149.457|61.6141|-149.457|aspen construction|
+61.5709|-149.452|61.5709|-149.452|assembly of god-wasilla|
+61.6651|-149.465|61.6651|-149.465|attic antiques & collectibles|
+61.5841|-149.441|61.5841|-149.441|attorney's process svc|
+61.1571|-149.883|61.1571|-149.883|flight deck pizza|
+61.2198|-149.733|61.2198|-149.733|flo's pancake house|
+61.1863|-149.976|61.1863|-149.976|floats alaska|
+61.2161|-149.876|61.2161|-149.876|floor max|
+61.2031|-149.749|61.2031|-149.749|floor seasons inc|
+61.1644|-149.897|61.1644|-149.897|floor supply|
+61.1834|-149.866|61.1834|-149.866|flooring designs|
+61.1731|-149.889|61.1731|-149.889|floral expressions|
+61.1089|-149.854|61.1089|-149.854|floral expressions|
+61.1951|-149.916|61.1951|-149.916|floral expressions|
+61.2157|-149.821|61.2157|-149.821|floral expressions|
+61.1372|-149.954|61.1372|-149.954|floral expressions|
+61.1955|-149.737|61.1955|-149.737|floral expressions|
+61.1444|-149.867|61.1444|-149.867|florales|
+61.2178|-149.882|61.2178|-149.882|florcraft carpet one|
+61.1824|-149.849|61.1824|-149.849|flores d'columbia|
+61.2161|-149.876|61.2161|-149.876|florio engineering science|
+61.2022|-149.84|61.2022|-149.84|flowerama|
+61.2099|-149.762|61.2099|-149.762|flowers by june|
+61.1951|-149.84|61.1951|-149.84|flowers for the moment|
+61.1069|-149.859|61.1069|-149.859|floyd's garage|
+61.1635|-149.881|61.1635|-149.881|fluid machinery|
+61.1201|-149.89|61.1201|-149.89|flutissimo|
+61.1902|-149.908|61.1902|-149.908|fly by night club|
+61.2104|-149.857|61.2104|-149.857|fly guys|
+61.1771|-149.97|61.1771|-149.97|fly in restaurant|
+61.1859|-149.976|61.1859|-149.976|fly north air charter|
+61.2171|-149.9|61.2171|-149.9|flying colors|
+61.1162|-149.755|61.1162|-149.755|flying colors inc|
+61.1381|-149.844|61.1381|-149.844|flying dutchman european pstry|
+61.1775|-149.941|61.1775|-149.941|flying machine restaurant|
+61.1938|-149.878|61.1938|-149.878|flypaper|
+61.1571|-149.883|61.1571|-149.883|fmc energy systems|
+61.1541|-149.958|61.1541|-149.958|fnaws|
+61.135|-149.88|61.135|-149.88|fnw/ferguson|
+61.1101|-149.857|61.1101|-149.857|all weather inc|
+61.1101|-149.857|61.1101|-149.857|alaska standard roofing|
+61.1101|-149.857|61.1101|-149.857|meehleis steel of north inc|
+61.1101|-149.857|61.1101|-149.857|m w drilling inc|
+61.1101|-149.857|61.1101|-149.857|alpine drilling & enterprises|
+61.1101|-149.857|61.1101|-149.857|ak sealco|
+61.1101|-149.857|61.1101|-149.857|yukon pumping service inc|
+61.1101|-149.857|61.1101|-149.857|parker masonry & tile|
+61.1101|-149.857|61.1101|-149.857|interior wall systems inc|
+61.1101|-149.857|61.1101|-149.857|far north builders llc|
+61.1101|-149.857|61.1101|-149.857|insulate alaska|
+61.1101|-149.857|61.1101|-149.857|california closet co|
+61.1101|-149.857|61.1101|-149.857|absolute environmental svcs|
+61.1101|-149.857|61.1101|-149.857|lawson roofing & remodeling in|
+61.1101|-149.857|61.1101|-149.857|earths bounty foods llc|
+61.1101|-149.857|61.1101|-149.857|alaska canine cookies inc|
+61.1101|-149.857|61.1101|-149.857|fishermans wild alskan seafood|
+61.1101|-149.857|61.1101|-149.857|alaska glacier refreshments|
+61.2141|-149.864|61.2141|-149.864|northern management services|
+61.2171|-149.9|61.2171|-149.9|mc lin contractors inc|
+61.2171|-149.9|61.2171|-149.9|northern services|
+61.2171|-149.9|61.2171|-149.9|dar-con corporation|
+61.2171|-149.9|61.2171|-149.9|alaska tree doctor|
+61.2171|-149.9|61.2171|-149.9|williwaw publishing co|
+61.2171|-149.9|61.2171|-149.9|anchorage daily news inc|
+61.2171|-149.9|61.2171|-149.9|mcclathchy newspapers inc|
+61.2171|-149.9|61.2171|-149.9|p s publications|
+61.2171|-149.9|61.2171|-149.9|alaska fiberglass solutions|
+61.2171|-149.9|61.2171|-149.9|polaris insulation ltd|
+61.2171|-149.9|61.2171|-149.9|urethane alaska|
+61.2171|-149.9|61.2171|-149.9|dls supply|
+61.2171|-149.9|61.2171|-149.9|stewart construction co|
+61.2171|-149.9|61.2171|-149.9|wruck wade crpting instlltions|
+61.2171|-149.9|61.2171|-149.9|service one heating & cooling|
+64.8487|-147.704|64.8487|-147.704|wells fargo financial|
+64.8486|-147.705|64.8486|-147.705|wells fargo home mortgage inc|
+64.8087|-147.71|64.8087|-147.71|wells repair|
+64.8363|-147.758|64.8363|-147.758|wendy's|
+64.8476|-147.704|64.8476|-147.704|wendy's|
+64.8302|-147.744|64.8302|-147.744|william w wennen md|
+64.8476|-147.812|64.8476|-147.812|todd z wentz dds|
+64.8144|-147.756|64.8144|-147.756|pamela wescott|
+64.8183|-147.778|64.8183|-147.778|west coast distr|
+64.8494|-147.818|64.8494|-147.818|west valley high school|
+64.849|-147.822|64.849|-147.822|west valley vision ctr inc|
+64.8367|-147.716|64.8367|-147.716|west wind construction|
+64.8127|-147.772|64.8127|-147.772|western mechanical inc|
+64.8143|-147.751|64.8143|-147.751|western power & equipment corp|
+64.842|-147.721|64.842|-147.721|western travel|
+64.8403|-147.714|64.8403|-147.714|westmark fairbanks hotel|
+64.8371|-147.881|64.8371|-147.881|westside video|
+64.8151|-147.707|64.8151|-147.707|white birch ent inc|
+64.8151|-147.707|64.8151|-147.707|white environmental consultant|
+64.8541|-147.81|64.8541|-147.81|whole earth grocery & deli|
+64.8878|-147.496|64.8878|-147.496|wholesome touch|
+64.8299|-147.728|64.8299|-147.728|wic food program|
+64.8132|-147.762|64.8132|-147.762|wick constructors inc|
+64.8372|-147.713|64.8372|-147.713|wilbur brothers sheet metal|
+64.7927|-148.036|64.7927|-148.036|wild alaska-ktm-husaberg|
+64.8717|-147.819|64.8717|-147.819|wild pond therapeutic massage|
+64.8527|-147.686|64.8527|-147.686|wild rose performance horse|
+64.8371|-147.811|64.8371|-147.811|wilderness cache ceramics|
+64.846|-147.705|64.846|-147.705|wilderness canoe adventures|
+64.9064|-147.726|64.9064|-147.726|wilderness fishing inc|
+64.9347|-147.56|64.9347|-147.56|wildview cabin vacation home|
+64.8861|-147.677|64.8861|-147.677|wildwood mobile home court|
+64.8302|-147.744|64.8302|-147.744|william h montano inc|
+60.6673|-151.311|60.6673|-151.311|sign works|
+60.6997|-151.38|60.6997|-151.38|kenai neon sign company|
+60.5658|-151.244|60.5658|-151.244|everts air fuel inc|
+60.6331|-151.163|60.6331|-151.163|stans drywall painting|
+60.6331|-151.163|60.6331|-151.163|leuenhagens drywall & paint|
+60.6331|-151.163|60.6331|-151.163|independent concrete entrprs|
+60.6201|-151.332|60.6201|-151.332|spectrum services inc|
+60.5004|-151.276|60.5004|-151.276|kelly electric inc|
+60.6331|-151.163|60.6331|-151.163|kachemak electric co inc|
+60.6331|-151.163|60.6331|-151.163|mount spurr cabinets & mllwk|
+60.6901|-151.321|60.6901|-151.321|northland drilling|
+60.6676|-151.29|60.6676|-151.29|water systems & service|
+60.6331|-151.163|60.6331|-151.163|peninsula drilling|
+60.5632|-151.266|60.5632|-151.266|hansons roofing|
+60.6331|-151.163|60.6331|-151.163|duct or sheetmetal inc|
+60.6331|-151.163|60.6331|-151.163|miller sheet metal & heating|
+60.6509|-151.342|60.6509|-151.342|rezins concrete|
+60.5601|-151.107|60.5601|-151.107|russs construction co|
+60.5123|-151.275|60.5123|-151.275|rutherford floor covering|
+60.6076|-151.325|60.6076|-151.325|grace painting|
+60.6331|-151.163|60.6331|-151.163|k i m c o inc|
+60.5175|-151.235|60.5175|-151.235|mikes plumbing & heating|
+60.5535|-151.108|60.5535|-151.108|whites plumbing & heating|
+60.5614|-151.275|60.5614|-151.275|struempler plumbing|
+60.5647|-151.195|60.5647|-151.195|everything electric|
+60.6331|-151.163|60.6331|-151.163|brooks electric|
+60.6331|-151.163|60.6331|-151.163|cook inlet sign & lighting|
+60.6331|-151.163|60.6331|-151.163|pacific star seafoods inc|
+60.6331|-151.163|60.6331|-151.163|inlet salmon|
+60.6331|-151.163|60.6331|-151.163|salamatof seafoods inc|
+60.6678|-151.31|60.6678|-151.31|alaska metal|
+60.5003|-151.276|60.5003|-151.276|wjr ltd|
+60.6331|-151.163|60.6331|-151.163|efta net hanging|
+61.5855|-149.326|61.5855|-149.326|audio services corp|
+61.5979|-149.437|61.5979|-149.437|aunty cheryl's child care|
+61.6141|-149.457|61.6141|-149.457|aurora academy|
+61.5799|-149.461|61.5799|-149.461|aurora cleaning svc|
+61.5845|-149.337|61.5845|-149.337|aurora dental ctr|
+61.5351|-149.558|61.5351|-149.558|aurora enterprises|
+61.5811|-149.45|61.5811|-149.45|aurora flooring|
+61.5793|-149.442|61.5793|-149.442|aurora imports|
+61.5661|-149.313|61.5661|-149.313|aurora portable toilets|
+61.6141|-149.457|61.6141|-149.457|auto solutions|
+61.5786|-149.332|61.5786|-149.332|autodidactic press|
+61.5811|-149.45|61.5811|-149.45|avant gardens|
+61.5825|-149.429|61.5825|-149.429|avanti corp|
+61.6141|-149.457|61.6141|-149.457|avon independent sales rep|
+61.5531|-149.651|61.5531|-149.651|avon products|
+61.5531|-149.651|61.5531|-149.651|avp distributors|
+61.5698|-149.62|61.5698|-149.62|axberg builders|
+61.6141|-149.457|61.6141|-149.457|b & b farms|
+61.5474|-149.458|61.5474|-149.458|b & e landscaping & excavation|
+61.5825|-149.429|61.5825|-149.429|b & k daniel enterprises inc|
+61.5955|-149.423|61.5955|-149.423|b & r communications|
+61.5801|-149.461|61.5801|-149.461|b d builders|
+61.5785|-149.415|61.5785|-149.415|b howard & assoc|
+61.6402|-149.34|61.6402|-149.34|b original signs|
+61.583|-149.457|61.583|-149.457|back & neck pain relief ctr|
+61.5907|-149.295|61.5907|-149.295|bagels alaska|
+61.6141|-149.457|61.6141|-149.457|bailey construction management|
+61.5933|-149.397|61.5933|-149.397|bailey's backhoe|
+61.6091|-149.77|61.6091|-149.77|bailey's furniture outlet|
+61.577|-149.411|61.577|-149.411|bailey's sleep gallery|
+61.6078|-149.322|61.6078|-149.322|bald mountain landscape co|
+61.6131|-149.397|61.6131|-149.397|bald mountain lumber|
+61.6648|-149.372|61.6648|-149.372|baldy mountain rental|
+61.5531|-149.651|61.5531|-149.651|bales construction|
+61.5811|-149.45|61.5811|-149.45|banana pepper espresso|
+61.583|-149.457|61.583|-149.457|bandwagon skate & snowboard|
+61.1955|-149.9|61.1955|-149.9|focus co|
+61.1844|-149.897|61.1844|-149.897|focus hair salon|
+61.1901|-149.892|61.1901|-149.892|fof communications|
+61.1444|-149.867|61.1444|-149.867|foley & foley|
+61.1955|-149.9|61.1955|-149.9|follett & assoc|
+61.1893|-149.887|61.1893|-149.887|fonte' coffee roaster|
+61.2174|-149.888|61.2174|-149.888|fonte' coffee roaster|
+61.2227|-149.842|61.2227|-149.842|food bank of alaska|
+61.1263|-149.872|61.1263|-149.872|food services of america|
+61.1138|-149.866|61.1138|-149.866|food transportation svc|
+61.2157|-149.821|61.2157|-149.821|foot locker|
+61.2174|-149.888|61.2174|-149.888|foot locker|
+61.1855|-149.868|61.1855|-149.868|foot locker|
+61.1951|-149.873|61.1951|-149.873|foot locker|
+61.1444|-149.867|61.1444|-149.867|foot locker|
+61.1883|-149.735|61.1883|-149.735|foothills self storage|
+61.2161|-149.876|61.2161|-149.876|footprint productions|
+61.2161|-149.876|61.2161|-149.876|for sale by owner alaska|
+61.1893|-149.888|61.1893|-149.888|for sale by owner assistance|
+61.2174|-149.888|61.2174|-149.888|for your cell only|
+61.1914|-149.886|61.1914|-149.886|ford motor credit co|
+61.1541|-149.958|61.1541|-149.958|kurt ford|
+61.1259|-149.717|61.1259|-149.717|forest & land management inc|
+61.2193|-149.902|61.2193|-149.902|forest oil corp|
+61.1879|-149.886|61.1879|-149.886|forestry div|
+61.1372|-149.912|61.1372|-149.912|forget me not assisted living|
+61.1101|-149.857|61.1101|-149.857|forget me not demos|
+61.1791|-149.94|61.1791|-149.94|forget me not hair & nail sln|
+61.2031|-149.749|61.2031|-149.749|forget-me-not business|
+61.1914|-149.877|61.1914|-149.877|forget-me-not rubber stamps|
+61.1944|-149.883|61.1944|-149.883|fortier & mikko|
+61.1923|-149.886|61.1923|-149.886|forty niner transportation inc|
+61.2123|-149.854|61.2123|-149.854|forward amusement co|
+61.2161|-149.876|61.2161|-149.876|fosler law group|
+61.2171|-149.9|61.2171|-149.9|accel fire systems inc|
+61.1125|-149.861|61.1125|-149.861|mr rooter|
+61.1419|-149.896|61.1419|-149.896|picture perfect pntg & drywall|
+61.145|-149.878|61.145|-149.878|power electric & general contg|
+61.1044|-149.865|61.1044|-149.865|alaska refinishers|
+61.1444|-149.876|61.1444|-149.876|ats alaska inc|
+61.1|-149.85|61.1|-149.85|alaska contrs & engineers llc|
+61.1105|-149.861|61.1105|-149.861|alaska dream kitchens|
+61.1286|-149.957|61.1286|-149.957|arctic enterprises|
+61.1201|-149.89|61.1201|-149.89|integrity framing|
+61.112|-149.871|61.112|-149.871|hammer roofing|
+61.1427|-149.864|61.1427|-149.864|gallos|
+61.1212|-149.947|61.1212|-149.947|novakovichs enterprises inc|
+61.1228|-149.939|61.1228|-149.939|blackfrog specialties llp|
+61.1093|-149.871|61.1093|-149.871|firstmark homes corp|
+61.145|-149.878|61.145|-149.878|grose brothers|
+61.1191|-149.871|61.1191|-149.871|swanson general contractors|
+61.1003|-149.856|61.1003|-149.856|sheehan drywall inc|
+61.1113|-149.872|61.1113|-149.872|quality northern drywall|
+61.1084|-149.861|61.1084|-149.861|foam & coating statewide|
+61.144|-149.878|61.144|-149.878|cornerstone tile|
+61.1426|-149.877|61.1426|-149.877|rinos tile & stone|
+61.1173|-149.861|61.1173|-149.861|cm toms carpentry|
+61.1347|-149.914|61.1347|-149.914|gregs carpentry plus|
+61.1291|-149.951|61.1291|-149.951|jelles bent nail|
+61.1116|-149.862|61.1116|-149.862|dreyers of alaska distributor|
+61.144|-149.893|61.144|-149.893|creative kitchen designs inc|
+61.1148|-149.867|61.1148|-149.867|pacific drilling ltd|
+61.1178|-149.88|61.1178|-149.88|digger dans backhoe|
+61.2191|-149.888|61.2191|-149.888|ebco|
+61.2191|-149.888|61.2191|-149.888|anchorage glacier pilots inc|
+61.2191|-149.888|61.2191|-149.888|mini-brute service inc|
+64.8403|-147.784|64.8403|-147.784|william pike consulting|
+64.843|-147.722|64.843|-147.722|william r fairbanks investment|
+64.8421|-147.725|64.8421|-147.725|william r satterberg jr law|
+64.8631|-147.758|64.8631|-147.758|williams express|
+64.8476|-147.704|64.8476|-147.704|williams express|
+64.826|-147.714|64.826|-147.714|williams express|
+64.9401|-147.402|64.9401|-147.402|susan williamson|
+64.8436|-147.722|64.8436|-147.722|willis of alaska inc|
+64.8375|-147.812|64.8375|-147.812|willow creek clothing llc|
+64.8302|-147.744|64.8302|-147.744|willow physical therapy|
+64.8615|-147.722|64.8615|-147.722|willow woods apartments|
+64.8496|-147.718|64.8496|-147.718|wilson & wilson|
+64.8414|-147.713|64.8414|-147.713|wilton adjustment svc|
+64.8426|-147.72|64.8426|-147.72|winfree law office|
+64.8371|-147.716|64.8371|-147.716|mary c wing|
+64.811|-147.71|64.811|-147.71|wingfoot commercial tire systs|
+64.8151|-147.707|64.8151|-147.707|wings|
+64.8098|-147.764|64.8098|-147.764|wire communications inc|
+64.8515|-147.706|64.8515|-147.706|wireless retail inc|
+64.856|-147.809|64.856|-147.809|wok n roll express|
+64.856|-147.809|64.856|-147.809|wok n' roll express|
+64.8488|-147.811|64.8488|-147.811|wolf run restaurant|
+64.856|-147.809|64.856|-147.809|timothy j woller dds|
+64.8331|-147.647|64.8331|-147.647|wolverine supply|
+64.8547|-147.754|64.8547|-147.754|women & children ctr|
+64.8151|-147.707|64.8151|-147.707|women's center|
+64.8516|-147.709|64.8516|-147.709|women's health midwifery svc|
+64.8411|-147.718|64.8411|-147.718|brett m wood|
+64.8818|-147.638|64.8818|-147.638|woodcraft unlimited|
+64.8389|-147.87|64.8389|-147.87|woodriver elementary school|
+64.8596|-147.73|64.8596|-147.73|woodway|
+64.9234|-147.744|64.9234|-147.744|work in progress|
+64.9271|-147.716|64.9271|-147.716|work in progress|
+64.8725|-147.59|64.8725|-147.59|work man|
+60.6331|-151.163|60.6331|-151.163|gamas designs|
+60.5601|-151.186|60.5601|-151.186|nordic log homes|
+60.6331|-151.163|60.6331|-151.163|hall quality builders inc|
+60.5751|-151.311|60.5751|-151.311|agrium us inc|
+60.5681|-151.27|60.5681|-151.27|online systems|
+60.5585|-151.252|60.5585|-151.252|j c penny catalog|
+60.6331|-151.163|60.6331|-151.163|deans peninsula printing|
+60.6331|-151.163|60.6331|-151.163|arctic printing|
+60.6331|-151.163|60.6331|-151.163|peninsula clarion|
+60.5602|-151.244|60.5602|-151.244|printers ink|
+60.6331|-151.163|60.6331|-151.163|lowestein construction|
+60.6331|-151.163|60.6331|-151.163|redoubt/bering pcf joint ventr|
+60.5091|-151.146|60.5091|-151.146|quality asphalt|
+60.6331|-151.163|60.6331|-151.163|commercial auto repr eqp svcs|
+60.5571|-151.062|60.5571|-151.062|peninsula construction inc|
+60.7016|-151.318|60.7016|-151.318|troy built construction|
+60.6331|-151.163|60.6331|-151.163|ace|
+60.6331|-151.163|60.6331|-151.163|brown construction company|
+60.5677|-151.204|60.5677|-151.204|scotts construction|
+60.6331|-151.163|60.6331|-151.163|kenneth r puch construction|
+60.6671|-151.33|60.6671|-151.33|veal construction|
+60.6331|-151.163|60.6331|-151.163|tng builders|
+60.5007|-151.276|60.5007|-151.276|dan sterchi construction|
+60.5579|-151.058|60.5579|-151.058|c & o builders|
+60.6331|-151.163|60.6331|-151.163|alaska family salmon llc|
+60.6331|-151.163|60.6331|-151.163|p a s inc|
+60.6331|-151.163|60.6331|-151.163|unocal corporation|
+60.5552|-151.25|60.5552|-151.25|2go|
+60.5601|-151.287|60.5601|-151.287|2go|
+60.6379|-151.351|60.6379|-151.351|2go|
+60.6331|-151.163|60.6331|-151.163|m a p crude petroleum & natura|
+60.7411|-151.302|60.7411|-151.302|m a p crude petroleum & naturl|
+60.6331|-151.163|60.6331|-151.163|shell|
+60.6331|-151.163|60.6331|-151.163|tesoro|
+60.5011|-151.275|60.5011|-151.275|trail lakes hatchery|
+61.5724|-149.445|61.5724|-149.445|kathleen c barron|
+61.6141|-149.457|61.6141|-149.457|barton electric|
+61.5641|-149.352|61.5641|-149.352|baxter inc|
+61.6041|-149.641|61.6041|-149.641|bear|
+61.5701|-149.497|61.5701|-149.497|bear air|
+61.6253|-149.432|61.6253|-149.432|bear paw masonry|
+61.5821|-149.438|61.5821|-149.438|bears den restaurant|
+61.7321|-150.12|61.7321|-150.12|beautiful homes by jeremy|
+61.5531|-149.651|61.5531|-149.651|becky's kettle korn|
+61.5368|-149.457|61.5368|-149.457|bee teddybear ranch|
+61.5846|-149.352|61.5846|-149.352|beehive beauty shop|
+61.5841|-149.466|61.5841|-149.466|bench transmission & gear|
+61.5812|-149.448|61.5812|-149.448|benefits consulting svc|
+61.6202|-149.381|61.6202|-149.381|berg development inc|
+61.5595|-149.514|61.5595|-149.514|berg masonry|
+61.5858|-149.493|61.5858|-149.493|berg roofing|
+61.7321|-150.12|61.7321|-150.12|beryozava elementary school|
+61.5257|-149.496|61.5257|-149.496|besse engineering|
+61.5811|-149.433|61.5811|-149.433|best in the land realty & ins|
+61.5775|-149.483|61.5775|-149.483|best western inn|
+61.5531|-149.651|61.5531|-149.651|better building concepts|
+61.5895|-149.406|61.5895|-149.406|natalie beyeler do|
+61.6141|-149.457|61.6141|-149.457|big bad wolf custom builders|
+61.5811|-149.45|61.5811|-149.45|big boys toys storage|
+61.5811|-149.45|61.5811|-149.45|big lake baptist church|
+61.6141|-149.457|61.6141|-149.457|big lake lions club|
+61.5671|-149.314|61.5671|-149.314|big lake polaris|
+61.6081|-149.3|61.6081|-149.3|big valley trucking|
+61.5411|-149.588|61.5411|-149.588|birch grove studios|
+61.3981|-149.843|61.3981|-149.843|birch harbor baptist church|
+61.5901|-149.444|61.5901|-149.444|birches|
+61.6141|-149.457|61.6141|-149.457|bit o lab kennels|
+61.5855|-149.327|61.5855|-149.327|blaine's picture framing|
+61.5952|-149.377|61.5952|-149.377|blind store|
+61.6091|-149.77|61.6091|-149.77|blizzard plows of alaska|
+61.5777|-149.412|61.5777|-149.412|blockbuster video|
+61.1991|-149.903|61.1991|-149.903|foster grandparent programs|
+61.2174|-149.904|61.2174|-149.904|foster pepper rubini & reeves|
+61.1878|-149.914|61.1878|-149.914|fountain chevron|
+61.2158|-149.881|61.2158|-149.881|fountains condominium|
+61.1991|-149.903|61.1991|-149.903|four a's|
+61.1811|-149.896|61.1811|-149.896|four dam pool power agency|
+61.2171|-149.771|61.2171|-149.771|four seasons|
+61.0893|-149.811|61.0893|-149.811|four seasons masonry|
+61.1261|-149.948|61.1261|-149.948|four seasons roofing|
+61.1088|-149.84|61.1088|-149.84|four seasons sunrooms|
+61.2031|-149.749|61.2031|-149.749|four seasons travel|
+61.1282|-149.847|61.1282|-149.847|four sons construction|
+61.1201|-149.89|61.1201|-149.89|four waters aquatics|
+61.2001|-149.867|61.2001|-149.867|fourth & feather product|
+61.2188|-149.896|61.2188|-149.896|fourth avenue barber shop|
+61.2183|-149.894|61.2183|-149.894|fourth avenue trolley tours|
+61.1836|-149.796|61.1836|-149.796|fowler & assoc|
+61.2141|-149.888|61.2141|-149.888|fowler agency|
+61.1088|-149.783|61.1088|-149.783|fowler gun & machine shop|
+61.1901|-149.892|61.1901|-149.892|michael fowler|
+61.2031|-149.749|61.2031|-149.749|foxie roxies|
+61.1536|-149.884|61.1536|-149.884|foxwood condominiums|
+61.1381|-149.844|61.1381|-149.844|frame cache|
+61.1938|-149.878|61.1938|-149.878|frame of mind|
+61.1883|-149.896|61.1883|-149.896|frame workshop & gallery|
+61.0939|-149.818|61.0939|-149.818|framemaker|
+61.1955|-149.878|61.1955|-149.878|frames etc|
+61.1883|-149.894|61.1883|-149.894|frameworks|
+61.199|-149.9|61.199|-149.9|framewright|
+61.1851|-149.934|61.1851|-149.934|fran's place bed & breakfast|
+61.1973|-149.748|61.1973|-149.748|francis gallela & assoc|
+61.1715|-149.9|61.1715|-149.9|frank flavin photography|
+61.2201|-149.895|61.2201|-149.895|frank s kozoil law offices|
+61.1101|-149.857|61.1101|-149.857|a & g enterprises inc|
+61.1101|-149.857|61.1101|-149.857|c & v backhoe service|
+61.1101|-149.857|61.1101|-149.857|s & a enterprises|
+61.1101|-149.857|61.1101|-149.857|x alaska inc|
+61.1101|-149.857|61.1101|-149.857|mc kinley landscaping inc|
+61.1101|-149.857|61.1101|-149.857|cats eye excavating inc|
+61.1101|-149.857|61.1101|-149.857|kincaid & son construction co|
+61.1101|-149.857|61.1101|-149.857|wood & beek contracting inc|
+61.1101|-149.857|61.1101|-149.857|fish alaska publication llc|
+61.1101|-149.857|61.1101|-149.857|instructional resources co|
+61.1101|-149.857|61.1101|-149.857|alpha fur dressing inc|
+61.1101|-149.857|61.1101|-149.857|alaskan quality log homes|
+61.1045|-149.859|61.1045|-149.859|titan enterprises|
+61.131|-149.878|61.131|-149.878|speedy auto glass|
+61.1024|-149.853|61.1024|-149.853|kenny withrow|
+61.1097|-149.881|61.1097|-149.881|alaskan paving & cnstr co|
+61.1314|-149.862|61.1314|-149.862|advanced industries|
+61.117|-149.879|61.117|-149.879|c cohn fur processors|
+61.145|-149.878|61.145|-149.878|alaskamen magazine inc|
+61.1215|-149.926|61.1215|-149.926|wood n things|
+61.1426|-149.877|61.1426|-149.877|tweedie douglas marketing|
+61.1377|-149.864|61.1377|-149.864|grizzly advertising spc|
+61.1444|-149.867|61.1444|-149.867|lees embroidery|
+61.126|-149.934|61.126|-149.934|labau frest rsurces consulting|
+61.1407|-149.887|61.1407|-149.887|m&l landscaping|
+61.1444|-149.876|61.1444|-149.876|bridgettes dog grooming|
+61.1074|-149.862|61.1074|-149.862|alpine animalhospital|
+61.107|-149.862|61.107|-149.862|evergreen landscaping inc|
+61.1053|-149.86|61.1053|-149.86|great alaska lawn and ldscpg|
+61.1124|-149.862|61.1124|-149.862|karin e schmidt dvm|
+61.1377|-149.864|61.1377|-149.864|pet xpress/the dog wash|
+61.1293|-149.862|61.1293|-149.862|amd enterprises|
+61.1426|-149.897|61.1426|-149.897|chaluka corp|
+64.8421|-147.733|64.8421|-147.733|workers' compensation conslnt|
+64.8579|-147.77|64.8579|-147.77|works creative catering|
+64.8411|-147.718|64.8411|-147.718|world eskimo-indian olympics|
+64.8429|-147.716|64.8429|-147.716|world plus inc|
+64.8523|-147.769|64.8523|-147.769|world wide movers inc|
+64.8081|-147.851|64.8081|-147.851|wright air svc|
+64.9401|-147.402|64.9401|-147.402|wsa alaska|
+64.9039|-147.623|64.9039|-147.623|wsi certified web solutions|
+64.8132|-147.7|64.8132|-147.7|wtw trucking|
+64.8452|-147.714|64.8452|-147.714|wyatt enterprises|
+64.8916|-147.563|64.8916|-147.563|wynfromere farms|
+64.8132|-147.731|64.8132|-147.731|x-ray motors|
+64.8495|-147.716|64.8495|-147.716|xavier hall|
+64.8371|-147.716|64.8371|-147.716|xentrix dezinz|
+64.8471|-147.706|64.8471|-147.706|xerox corp|
+64.8169|-147.779|64.8169|-147.779|xral laboratories|
+64.8438|-147.901|64.8438|-147.901|yak estates apartments|
+64.843|-147.722|64.843|-147.722|leslie yamamoto|
+64.8421|-147.686|64.8421|-147.686|dale yoder|
+64.8372|-147.809|64.8372|-147.809|you're special flowers & gifts|
+64.8465|-147.74|64.8465|-147.74|young learners preschool|
+64.8199|-147.737|64.8199|-147.737|young life|
+64.8191|-147.696|64.8191|-147.696|young's cleaning svc|
+64.8127|-147.745|64.8127|-147.745|young's gear inc|
+64.8344|-147.762|64.8344|-147.762|youth facility juvenile prbtn|
+64.8419|-147.722|64.8419|-147.722|youth sports bingo|
+64.8926|-147.465|64.8926|-147.465|youth with a mission|
+64.824|-147.899|64.824|-147.899|yukon air svc|
+64.8296|-147.848|64.8296|-147.848|yukon koyukuk school district|
+64.842|-147.721|64.842|-147.721|yukon quest cache|
+64.8455|-147.696|64.8455|-147.696|yukon quest intl|
+64.8448|-147.714|64.8448|-147.714|yukon river intl tribal wtrshd|
+64.8331|-147.812|64.8331|-147.812|yukon services|
+60.5011|-151.275|60.5011|-151.275|tutka hatchery|
+60.6331|-151.163|60.6331|-151.163|eklutna salmon hatchery|
+60.5555|-151.249|60.5555|-151.249|unocal corporation|
+60.6331|-151.163|60.6331|-151.163|industrial refuse|
+60.564|-151.242|60.564|-151.242|industrial instrument service|
+60.6331|-151.163|60.6331|-151.163|alaska pipe rcvery specialists|
+60.6331|-151.163|60.6331|-151.163|arc tech services|
+60.5643|-151.195|60.5643|-151.195|cornerstone construction|
+60.6331|-151.163|60.6331|-151.163|holt construction|
+60.6742|-151.213|60.6742|-151.213|alaskan new home builders|
+60.6331|-151.163|60.6331|-151.163|jc bookey excavating|
+60.6331|-151.163|60.6331|-151.163|lalasa kennels|
+60.5619|-151.277|60.5619|-151.277|herb fireweed garden & gifts|
+60.5559|-151.262|60.5559|-151.262|groomingdales|
+60.6331|-151.163|60.6331|-151.163|moores landscaping|
+60.5569|-151.059|60.5569|-151.059|d&n landscaping|
+60.6331|-151.163|60.6331|-151.163|fv cape spencer|
+60.6331|-151.163|60.6331|-151.163|efta commercial fishing|
+60.6331|-151.163|60.6331|-151.163|evergreen alaska inc|
+54.9801|-162.099|54.9801|-162.099|peter pan seafoods inc|
+54.9801|-162.099|54.9801|-162.099|dream girl owners fv mac|
+58.7051|-156.633|58.7051|-156.633|king salmon ground service|
+58.7051|-156.633|58.7051|-156.633|odw & son construction|
+58.7051|-156.633|58.7051|-156.633|badger electric|
+58.7051|-156.633|58.7051|-156.633|harris electric inc|
+58.7051|-156.633|58.7051|-156.633|ram rentals|
+58.7051|-156.633|58.7051|-156.633|national park svcs dept intr|
+58.7051|-156.633|58.7051|-156.633|diane woods greenhouse|
+58.7051|-156.633|58.7051|-156.633|lone wolf construction|
+58.7051|-156.633|58.7051|-156.633|iliamna lake contractors llc|
+57.8591|-152.679|57.8591|-152.679|custom builders|
+57.7891|-152.397|57.7891|-152.397|kiewit pacific co|
+57.8591|-152.679|57.8591|-152.679|l g schneider & sons inc|
+57.8591|-152.679|57.8591|-152.679|f & r inc|
+57.8591|-152.679|57.8591|-152.679|majdic & son inc|
+61.5812|-149.436|61.5812|-149.436|blockbuster video|
+61.5825|-149.429|61.5825|-149.429|blood bank of alaska inc|
+61.6104|-149.425|61.6104|-149.425|bluestreak enterprises|
+61.6106|-149.392|61.6106|-149.392|bob mann appraisal svc|
+61.5803|-149.534|61.5803|-149.534|bob'sleds of alaska|
+61.5856|-149.316|61.5856|-149.316|body's in motion dance apparel|
+61.5887|-149.407|61.5887|-149.407|bogard dental|
+61.6141|-149.449|61.6141|-149.449|bogard mail svc|
+61.6144|-149.326|61.6144|-149.326|bogard mail svc|
+61.6141|-149.457|61.6141|-149.457|bogard road baptist|
+61.6141|-149.457|61.6141|-149.457|jack bohnert|
+61.6141|-149.457|61.6141|-149.457|bolinder excavating|
+61.6141|-149.457|61.6141|-149.457|bonny masonry|
+61.5824|-149.46|61.5824|-149.46|books inc|
+61.5531|-149.651|61.5531|-149.651|bootleg electric|
+61.6091|-149.77|61.6091|-149.77|borealis beach club|
+61.6141|-149.457|61.6141|-149.457|borealis property svc|
+61.7321|-150.12|61.7321|-150.12|borough transfer site|
+61.6118|-149.444|61.6118|-149.444|bowker mechanical|
+61.5825|-149.429|61.5825|-149.429|boyer bowers & tegeler|
+61.5811|-149.43|61.5811|-149.43|boyer bowers & tegeler|
+61.6006|-149.357|61.6006|-149.357|boys & girls clubs|
+61.7321|-150.12|61.7321|-150.12|brain garden|
+61.6141|-149.457|61.6141|-149.457|branching out|
+61.5867|-149.43|61.5867|-149.43|brett memorial ice arena|
+61.5869|-149.428|61.5869|-149.428|brett memorial ice arena|
+61.5776|-149.411|61.5776|-149.411|brew bakers espresso|
+61.6141|-149.457|61.6141|-149.457|bridgestone bookkeeping svc|
+61.6141|-149.457|61.6141|-149.457|brite learning materials|
+61.5822|-149.463|61.5822|-149.463|brown jug inc|
+61.5816|-149.452|61.5816|-149.452|brown's electrical supply|
+61.6141|-149.457|61.6141|-149.457|brown's plumbing|
+61.6141|-149.457|61.6141|-149.457|browngoetz consulting|
+61.5765|-149.407|61.5765|-149.407|bruce's street & skate|
+61.5788|-149.483|61.5788|-149.483|bruno's concrete|
+61.5801|-149.461|61.5801|-149.461|bruns concrete & construction|
+61.1901|-149.892|61.1901|-149.892|franke enterprises|
+61.1983|-149.88|61.1983|-149.88|franklin & assoc|
+61.2161|-149.876|61.2161|-149.876|franks concrete & masonry|
+61.2174|-149.888|61.2174|-149.888|fraternal order of ak troopers|
+61.1662|-149.845|61.1662|-149.845|fraternal order of eagles|
+61.1879|-149.886|61.1879|-149.886|fraud control unit|
+61.223|-149.872|61.223|-149.872|fraud control unit|
+61.1901|-149.892|61.1901|-149.892|frawner corp|
+61.159|-149.858|61.159|-149.858|fred astaire dance studio|
+61.1377|-149.864|61.1377|-149.864|fred astaire dance studio|
+61.1935|-149.804|61.1935|-149.804|fred barbee & assoc|
+61.2174|-149.888|61.2174|-149.888|fred meyer jewelers|
+61.1444|-149.867|61.1444|-149.867|fred meyer jewelers|
+61.195|-149.864|61.195|-149.864|fred meyer one stop shopping|
+61.1375|-149.916|61.1375|-149.916|fred meyer one stop shopping|
+61.1373|-149.839|61.1373|-149.839|fred meyer one stop shopping|
+61.2099|-149.737|61.2099|-149.737|fred meyer one stop shopping|
+61.1946|-149.941|61.1946|-149.941|fred walatka & assoc|
+61.1973|-149.877|61.1973|-149.877|fred's bail bonding|
+61.1541|-149.958|61.1541|-149.958|fred's towing & recovery|
+61.2188|-149.896|61.2188|-149.896|frederick h hahn law offices|
+61.2171|-149.795|61.2171|-149.795|free methodist church|
+61.1541|-149.958|61.1541|-149.958|freedom refuse|
+61.1973|-149.877|61.1973|-149.877|freedom ventures trust|
+61.2148|-149.9|61.2148|-149.9|freelance legal secretary|
+61.2099|-149.762|61.2099|-149.762|freight management svc|
+61.163|-149.859|61.163|-149.859|freightliner of alaska|
+61.2239|-149.823|61.2239|-149.823|freightliner of alaska|
+61.1896|-149.91|61.1896|-149.91|fremont transfer hair est|
+61.1923|-149.886|61.1923|-149.886|french bean|
+61.1541|-149.958|61.1541|-149.958|french consulate|
+61.1935|-149.905|61.1935|-149.905|french oven|
+61.1854|-149.928|61.1854|-149.928|french's construction|
+61.1152|-149.914|61.1152|-149.914|fresh & clean janitorial svc|
+61.1387|-149.915|61.1387|-149.915|wilson riley k dvm vet|
+61.1387|-149.915|61.1387|-149.915|james e gaarder dvm dacvo|
+61.1145|-149.876|61.1145|-149.876|d&t enterprises|
+61.1421|-149.9|61.1421|-149.9|dimond greenhouses inc|
+61.1053|-149.859|61.1053|-149.859|alaska winter gardens|
+61.1124|-149.862|61.1124|-149.862|handler corp|
+61.1441|-149.878|61.1441|-149.878|kitchels alaska guide service|
+61.1243|-149.927|61.1243|-149.927|romo equipment co|
+61.1337|-149.878|61.1337|-149.878|brooks range supply inc|
+61.1209|-149.884|61.1209|-149.884|roger hickel contracting|
+61.1084|-149.861|61.1084|-149.861|f & f construction|
+61.1426|-149.878|61.1426|-149.878|acme fence co inc|
\ No newline at end of file
diff --git a/hyracks-examples/hyracks-integration-tests/pom.xml b/hyracks-examples/hyracks-integration-tests/pom.xml
index 8ef1706..e5c2c51 100644
--- a/hyracks-examples/hyracks-integration-tests/pom.xml
+++ b/hyracks-examples/hyracks-integration-tests/pom.xml
@@ -76,6 +76,13 @@
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-storage-am-rtree</artifactId>
+ <version>0.1.7-SNAPSHOT</version>
+ <type>jar</type>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-test-support</artifactId>
<version>0.1.7-SNAPSHOT</version>
<type>jar</type>
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeOperatorsTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeOperatorsTest.java
index fd0b7b5..b4a1d60 100644
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeOperatorsTest.java
+++ b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/btree/BTreeOperatorsTest.java
@@ -47,34 +47,38 @@
import edu.uci.ics.hyracks.dataflow.std.misc.NullSinkOperatorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.misc.PrinterOperatorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeBulkLoadOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeInsertUpdateDeleteOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeOpHelperFactory;
import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBTreeRegistryProvider;
-import edu.uci.ics.hyracks.storage.am.btree.frames.NSMInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.frames.NSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOp;
-import edu.uci.ics.hyracks.storage.am.btree.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOpHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexInsertUpdateDeleteOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexStatsOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.test.support.TestBTreeRegistryProvider;
import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
+import edu.uci.ics.hyracks.test.support.TestTreeIndexRegistryProvider;
import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
public class BTreeOperatorsTest extends AbstractIntegrationTest {
static {
- TestStorageManagerComponentHolder.init(8192, 20);
+ TestStorageManagerComponentHolder.init(8192, 20, 20);
}
private IStorageManagerInterface storageManager = new TestStorageManagerInterface();
- private IBTreeRegistryProvider btreeRegistryProvider = new TestBTreeRegistryProvider();
+ private IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider = new TestTreeIndexRegistryProvider();
+ private ITreeIndexOpHelperFactory opHelperFactory = new BTreeOpHelperFactory();
private final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat(
"ddMMyy-hhmmssSS");
private final static String sep = System.getProperty("file.separator");
-
+
// field, type and key declarations for primary index
private int primaryFieldCount = 6;
private ITypeTrait[] primaryTypeTraits = new ITypeTrait[primaryFieldCount];
@@ -82,9 +86,9 @@
private IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
private TypeAwareTupleWriterFactory primaryTupleWriterFactory = new TypeAwareTupleWriterFactory(
primaryTypeTraits);
- private IBTreeInteriorFrameFactory primaryInteriorFrameFactory = new NSMInteriorFrameFactory(
+ private ITreeIndexFrameFactory primaryInteriorFrameFactory = new BTreeNSMInteriorFrameFactory(
primaryTupleWriterFactory);
- private IBTreeLeafFrameFactory primaryLeafFrameFactory = new NSMLeafFrameFactory(
+ private ITreeIndexFrameFactory primaryLeafFrameFactory = new BTreeNSMLeafFrameFactory(
primaryTupleWriterFactory);
private static String primaryBtreeName = "primary"
@@ -112,9 +116,9 @@
private IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[secondaryKeyFieldCount];
private TypeAwareTupleWriterFactory secondaryTupleWriterFactory = new TypeAwareTupleWriterFactory(
secondaryTypeTraits);
- private IBTreeInteriorFrameFactory secondaryInteriorFrameFactory = new NSMInteriorFrameFactory(
+ private ITreeIndexFrameFactory secondaryInteriorFrameFactory = new BTreeNSMInteriorFrameFactory(
secondaryTupleWriterFactory);
- private IBTreeLeafFrameFactory secondaryLeafFrameFactory = new NSMLeafFrameFactory(
+ private ITreeIndexFrameFactory secondaryLeafFrameFactory = new BTreeNSMLeafFrameFactory(
secondaryTupleWriterFactory);
private static String secondaryBtreeName = "secondary"
@@ -195,11 +199,12 @@
NC1_ID);
int[] fieldPermutation = { 0, 1, 2, 4, 5, 7 };
- BTreeBulkLoadOperatorDescriptor primaryBtreeBulkLoad = new BTreeBulkLoadOperatorDescriptor(
- spec, storageManager, btreeRegistryProvider,
+ TreeIndexBulkLoadOperatorDescriptor primaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(
+ spec, storageManager, treeIndexRegistryProvider,
primaryBtreeSplitProvider, primaryInteriorFrameFactory,
primaryLeafFrameFactory, primaryTypeTraits,
- primaryComparatorFactories, fieldPermutation, 0.7f);
+ primaryComparatorFactories, fieldPermutation, 0.7f,
+ opHelperFactory);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
primaryBtreeBulkLoad, NC1_ID);
@@ -214,6 +219,22 @@
}
@Test
+ public void showPrimaryIndexStats() throws Exception {
+ JobSpecification spec = new JobSpecification();
+
+ TreeIndexStatsOperatorDescriptor primaryStatsOp = new TreeIndexStatsOperatorDescriptor(
+ spec, storageManager, treeIndexRegistryProvider,
+ primaryBtreeSplitProvider, primaryInteriorFrameFactory,
+ primaryLeafFrameFactory, primaryTypeTraits,
+ primaryComparatorFactories, opHelperFactory);
+ PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
+ primaryStatsOp, NC1_ID);
+
+ spec.addRoot(primaryStatsOp);
+ runTest(spec);
+ }
+
+ @Test
public void scanPrimaryIndexTest() throws Exception {
JobSpecification spec = new JobSpecification();
@@ -240,11 +261,11 @@
int[] highKeyFields = null; // + infinity
BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(
- spec, primaryRecDesc, storageManager, btreeRegistryProvider,
- primaryBtreeSplitProvider, primaryInteriorFrameFactory,
- primaryLeafFrameFactory, primaryTypeTraits,
- primaryComparatorFactories, true, lowKeyFields, highKeyFields,
- true, true);
+ spec, primaryRecDesc, storageManager,
+ treeIndexRegistryProvider, primaryBtreeSplitProvider,
+ primaryInteriorFrameFactory, primaryLeafFrameFactory,
+ primaryTypeTraits, primaryComparatorFactories, true,
+ lowKeyFields, highKeyFields, true, true, opHelperFactory);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
primaryBtreeSearchOp, NC1_ID);
@@ -293,11 +314,11 @@
int[] highKeyFields = { 1 };
BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(
- spec, primaryRecDesc, storageManager, btreeRegistryProvider,
- primaryBtreeSplitProvider, primaryInteriorFrameFactory,
- primaryLeafFrameFactory, primaryTypeTraits,
- primaryComparatorFactories, true, lowKeyFields, highKeyFields,
- true, true);
+ spec, primaryRecDesc, storageManager,
+ treeIndexRegistryProvider, primaryBtreeSplitProvider,
+ primaryInteriorFrameFactory, primaryLeafFrameFactory,
+ primaryTypeTraits, primaryComparatorFactories, true,
+ lowKeyFields, highKeyFields, true, true, opHelperFactory);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
primaryBtreeSearchOp, NC1_ID);
@@ -342,11 +363,11 @@
// scan primary index
BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(
- spec, primaryRecDesc, storageManager, btreeRegistryProvider,
- primaryBtreeSplitProvider, primaryInteriorFrameFactory,
- primaryLeafFrameFactory, primaryTypeTraits,
- primaryComparatorFactories, true, lowKeyFields, highKeyFields,
- true, true);
+ spec, primaryRecDesc, storageManager,
+ treeIndexRegistryProvider, primaryBtreeSplitProvider,
+ primaryInteriorFrameFactory, primaryLeafFrameFactory,
+ primaryTypeTraits, primaryComparatorFactories, true,
+ lowKeyFields, highKeyFields, true, true, opHelperFactory);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
primaryBtreeSearchOp, NC1_ID);
@@ -362,11 +383,12 @@
// load secondary index
int[] fieldPermutation = { 3, 0 };
- BTreeBulkLoadOperatorDescriptor secondaryBtreeBulkLoad = new BTreeBulkLoadOperatorDescriptor(
- spec, storageManager, btreeRegistryProvider,
+ TreeIndexBulkLoadOperatorDescriptor secondaryBtreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(
+ spec, storageManager, treeIndexRegistryProvider,
secondaryBtreeSplitProvider, secondaryInteriorFrameFactory,
secondaryLeafFrameFactory, secondaryTypeTraits,
- secondaryComparatorFactories, fieldPermutation, 0.7f);
+ secondaryComparatorFactories, fieldPermutation, 0.7f,
+ opHelperFactory);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
secondaryBtreeBulkLoad, NC1_ID);
@@ -414,11 +436,12 @@
// search secondary index
BTreeSearchOperatorDescriptor secondaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(
- spec, secondaryRecDesc, storageManager, btreeRegistryProvider,
- secondaryBtreeSplitProvider, secondaryInteriorFrameFactory,
- secondaryLeafFrameFactory, secondaryTypeTraits,
- secondaryComparatorFactories, true, secondaryLowKeyFields,
- secondaryHighKeyFields, true, true);
+ spec, secondaryRecDesc, storageManager,
+ treeIndexRegistryProvider, secondaryBtreeSplitProvider,
+ secondaryInteriorFrameFactory, secondaryLeafFrameFactory,
+ secondaryTypeTraits, secondaryComparatorFactories, true,
+ secondaryLowKeyFields, secondaryHighKeyFields, true, true,
+ opHelperFactory);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
secondaryBtreeSearchOp, NC1_ID);
@@ -429,11 +452,12 @@
// search primary index
BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(
- spec, primaryRecDesc, storageManager, btreeRegistryProvider,
- primaryBtreeSplitProvider, primaryInteriorFrameFactory,
- primaryLeafFrameFactory, primaryTypeTraits,
- primaryComparatorFactories, true, primaryLowKeyFields,
- primaryHighKeyFields, true, true);
+ spec, primaryRecDesc, storageManager,
+ treeIndexRegistryProvider, primaryBtreeSplitProvider,
+ primaryInteriorFrameFactory, primaryLeafFrameFactory,
+ primaryTypeTraits, primaryComparatorFactories, true,
+ primaryLowKeyFields, primaryHighKeyFields, true, true,
+ opHelperFactory);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
primaryBtreeSearchOp, NC1_ID);
@@ -491,23 +515,23 @@
// insert into primary index
int[] primaryFieldPermutation = { 0, 1, 2, 4, 5, 7 };
- BTreeInsertUpdateDeleteOperatorDescriptor primaryBtreeInsertOp = new BTreeInsertUpdateDeleteOperatorDescriptor(
- spec, ordersDesc, storageManager, btreeRegistryProvider,
+ TreeIndexInsertUpdateDeleteOperatorDescriptor primaryBtreeInsertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+ spec, ordersDesc, storageManager, treeIndexRegistryProvider,
primaryBtreeSplitProvider, primaryInteriorFrameFactory,
primaryLeafFrameFactory, primaryTypeTraits,
primaryComparatorFactories, primaryFieldPermutation,
- BTreeOp.BTO_INSERT);
+ IndexOp.INSERT, opHelperFactory);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
primaryBtreeInsertOp, NC1_ID);
// first secondary index
int[] fieldPermutationB = { 4, 0 };
- BTreeInsertUpdateDeleteOperatorDescriptor secondaryInsertOp = new BTreeInsertUpdateDeleteOperatorDescriptor(
- spec, ordersDesc, storageManager, btreeRegistryProvider,
+ TreeIndexInsertUpdateDeleteOperatorDescriptor secondaryInsertOp = new TreeIndexInsertUpdateDeleteOperatorDescriptor(
+ spec, ordersDesc, storageManager, treeIndexRegistryProvider,
secondaryBtreeSplitProvider, secondaryInteriorFrameFactory,
secondaryLeafFrameFactory, secondaryTypeTraits,
secondaryComparatorFactories, fieldPermutationB,
- BTreeOp.BTO_INSERT);
+ IndexOp.INSERT, opHelperFactory);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
secondaryInsertOp, NC1_ID);
@@ -562,11 +586,12 @@
// search secondary index
BTreeSearchOperatorDescriptor secondaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(
- spec, secondaryRecDesc, storageManager, btreeRegistryProvider,
- secondaryBtreeSplitProvider, secondaryInteriorFrameFactory,
- secondaryLeafFrameFactory, secondaryTypeTraits,
- secondaryComparatorFactories, true, secondaryLowKeyFields,
- secondaryHighKeyFields, true, true);
+ spec, secondaryRecDesc, storageManager,
+ treeIndexRegistryProvider, secondaryBtreeSplitProvider,
+ secondaryInteriorFrameFactory, secondaryLeafFrameFactory,
+ secondaryTypeTraits, secondaryComparatorFactories, true,
+ secondaryLowKeyFields, secondaryHighKeyFields, true, true,
+ opHelperFactory);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
secondaryBtreeSearchOp, NC1_ID);
@@ -577,11 +602,12 @@
// search primary index
BTreeSearchOperatorDescriptor primaryBtreeSearchOp = new BTreeSearchOperatorDescriptor(
- spec, primaryRecDesc, storageManager, btreeRegistryProvider,
- primaryBtreeSplitProvider, primaryInteriorFrameFactory,
- primaryLeafFrameFactory, primaryTypeTraits,
- primaryComparatorFactories, true, primaryLowKeyFields,
- primaryHighKeyFields, true, true);
+ spec, primaryRecDesc, storageManager,
+ treeIndexRegistryProvider, primaryBtreeSplitProvider,
+ primaryInteriorFrameFactory, primaryLeafFrameFactory,
+ primaryTypeTraits, primaryComparatorFactories, true,
+ primaryLowKeyFields, primaryHighKeyFields, true, true,
+ opHelperFactory);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec,
primaryBtreeSearchOp, NC1_ID);
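
The BTreeOperatorsTest hunks above all follow the same pattern: the B-tree-specific descriptors (BTreeBulkLoadOperatorDescriptor, BTreeInsertUpdateDeleteOperatorDescriptor) give way to the generic tree-index descriptors, every constructor gains a trailing ITreeIndexOpHelperFactory argument, and BTreeOp.BTO_INSERT becomes IndexOp.INSERT. A minimal sketch of the new bulk-load wiring, assuming only the constructor signature visible in this diff (the helper class and parameter names below are illustrative, not part of the commit):

// Sketch only: mirrors the TreeIndexBulkLoadOperatorDescriptor call shape shown above.
import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeOpHelperFactory;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;

public class TreeIndexBulkLoadSketch {
    // The op-helper factory passed last binds the generic descriptor to a concrete
    // index type; BTreeOpHelperFactory selects a B-tree, RTreeOpHelperFactory an R-tree.
    static TreeIndexBulkLoadOperatorDescriptor bulkLoad(JobSpecification spec,
            IStorageManagerInterface storageManager,
            IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider,
            IFileSplitProvider splitProvider, ITreeIndexFrameFactory interiorFrameFactory,
            ITreeIndexFrameFactory leafFrameFactory, ITypeTrait[] typeTraits,
            IBinaryComparatorFactory[] comparatorFactories, int[] fieldPermutation) {
        return new TreeIndexBulkLoadOperatorDescriptor(spec, storageManager,
                treeIndexRegistryProvider, splitProvider, interiorFrameFactory,
                leafFrameFactory, typeTraits, comparatorFactories, fieldPermutation,
                0.7f, new BTreeOpHelperFactory());
    }
}
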
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java
index db5985d..a8db325 100644
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java
+++ b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/integration/TPCHCustomerOrderHashJoinTest.java
@@ -80,7 +80,15 @@
}
/*
- * TPCH Customer table: CREATE TABLE CUSTOMER ( C_CUSTKEY INTEGER NOT NULL, C_NAME VARCHAR(25) NOT NULL, C_ADDRESS VARCHAR(40) NOT NULL, C_NATIONKEY INTEGER NOT NULL, C_PHONE CHAR(15) NOT NULL, C_ACCTBAL DECIMAL(15,2) NOT NULL, C_MKTSEGMENT CHAR(10) NOT NULL, C_COMMENT VARCHAR(117) NOT NULL ); TPCH Orders table: CREATE TABLE ORDERS ( O_ORDERKEY INTEGER NOT NULL, O_CUSTKEY INTEGER NOT NULL, O_ORDERSTATUS CHAR(1) NOT NULL, O_TOTALPRICE DECIMAL(15,2) NOT NULL, O_ORDERDATE DATE NOT NULL, O_ORDERPRIORITY CHAR(15) NOT NULL, O_CLERK CHAR(15) NOT NULL, O_SHIPPRIORITY INTEGER NOT NULL, O_COMMENT VARCHAR(79) NOT NULL );
+ * TPCH Customer table: CREATE TABLE CUSTOMER ( C_CUSTKEY INTEGER NOT NULL,
+ * C_NAME VARCHAR(25) NOT NULL, C_ADDRESS VARCHAR(40) NOT NULL, C_NATIONKEY
+ * INTEGER NOT NULL, C_PHONE CHAR(15) NOT NULL, C_ACCTBAL DECIMAL(15,2) NOT
+ * NULL, C_MKTSEGMENT CHAR(10) NOT NULL, C_COMMENT VARCHAR(117) NOT NULL );
+ * TPCH Orders table: CREATE TABLE ORDERS ( O_ORDERKEY INTEGER NOT NULL,
+ * O_CUSTKEY INTEGER NOT NULL, O_ORDERSTATUS CHAR(1) NOT NULL, O_TOTALPRICE
+ * DECIMAL(15,2) NOT NULL, O_ORDERDATE DATE NOT NULL, O_ORDERPRIORITY
+ * CHAR(15) NOT NULL, O_CLERK CHAR(15) NOT NULL, O_SHIPPRIORITY INTEGER NOT
+ * NULL, O_COMMENT VARCHAR(79) NOT NULL );
*/
@Test
@@ -364,9 +372,11 @@
IOperatorDescriptor printer = DEBUG ? new PrinterOperatorDescriptor(spec)
: new NullSinkOperatorDescriptor(spec);
- // FileSplit[] custOrdersJoinSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
- // "data/tpch0.001/custOrdersLeftOuterJoin.csv"))) };
- // LineFileWriteOperatorDescriptor printer = new LineFileWriteOperatorDescriptor(spec, custOrdersJoinSplits);
+ // FileSplit[] custOrdersJoinSplits = new FileSplit[] { new
+ // FileSplit(NC1_ID, new FileReference(new File(
+ // "data/tpch0.001/custOrdersLeftOuterJoin.csv"))) };
+ // LineFileWriteOperatorDescriptor printer = new
+ // LineFileWriteOperatorDescriptor(spec, custOrdersJoinSplits);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
@@ -446,9 +456,11 @@
IOperatorDescriptor printer = DEBUG ? new PrinterOperatorDescriptor(spec)
: new NullSinkOperatorDescriptor(spec);
- // FileSplit[] custOrdersJoinSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
- // "data/tpch0.001/custOrdersLeftOuterJoin.csv"))) };
- // LineFileWriteOperatorDescriptor printer = new LineFileWriteOperatorDescriptor(spec, custOrdersJoinSplits);
+ // FileSplit[] custOrdersJoinSplits = new FileSplit[] { new
+ // FileSplit(NC1_ID, new FileReference(new File(
+ // "data/tpch0.001/custOrdersLeftOuterJoin.csv"))) };
+ // LineFileWriteOperatorDescriptor printer = new
+ // LineFileWriteOperatorDescriptor(spec, custOrdersJoinSplits);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
@@ -528,9 +540,11 @@
IOperatorDescriptor printer = DEBUG ? new PrinterOperatorDescriptor(spec)
: new NullSinkOperatorDescriptor(spec);
- // FileSplit[] custOrdersJoinSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
- // "data/tpch0.001/custOrdersLeftOuterJoin.csv"))) };
- // LineFileWriteOperatorDescriptor printer = new LineFileWriteOperatorDescriptor(spec, custOrdersJoinSplits);
+ // FileSplit[] custOrdersJoinSplits = new FileSplit[] { new
+ // FileSplit(NC1_ID, new FileReference(new File(
+ // "data/tpch0.001/custOrdersLeftOuterJoin.csv"))) };
+ // LineFileWriteOperatorDescriptor printer = new
+ // LineFileWriteOperatorDescriptor(spec, custOrdersJoinSplits);
PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
IConnectorDescriptor ordJoinConn = new OneToOneConnectorDescriptor(spec);
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/InvertedIndexOperatorsTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/InvertedIndexOperatorsTest.java
index 2cfe438..c84ef74 100644
--- a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/InvertedIndexOperatorsTest.java
+++ b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/invertedindex/InvertedIndexOperatorsTest.java
@@ -21,9 +21,11 @@
import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
import edu.uci.ics.hyracks.dataflow.std.misc.PrinterOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.invertedindex.api.IBinaryTokenizerFactory;
import edu.uci.ics.hyracks.storage.am.invertedindex.dataflow.BinaryTokenizerOperatorDescriptor;
import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.DelimitedUTF8StringBinaryTokenizerFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizerFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.ITokenFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.UTF8WordTokenFactory;
import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
public class InvertedIndexOperatorsTest extends AbstractIntegrationTest {
@@ -45,7 +47,10 @@
RecordDescriptor tokenizerRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
UTF8StringSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE });
- IBinaryTokenizerFactory tokenizerFactory = new DelimitedUTF8StringBinaryTokenizerFactory(' ');
+
+ ITokenFactory tokenFactory = new UTF8WordTokenFactory();
+ IBinaryTokenizerFactory tokenizerFactory = new DelimitedUTF8StringBinaryTokenizerFactory(true, false,
+ tokenFactory);
int[] tokenFields = { 1 };
int[] projFields = { 0 };
BinaryTokenizerOperatorDescriptor binaryTokenizer = new BinaryTokenizerOperatorDescriptor(spec,
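
The InvertedIndexOperatorsTest change above reflects a new tokenizer API: DelimitedUTF8StringBinaryTokenizerFactory is now constructed from two boolean flags and an ITokenFactory instead of a single delimiter character. A small sketch, copying the flag values verbatim from the test (their semantics are not documented in this diff):

// Sketch only: mirrors the tokenizer construction in the hunk above.
import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.DelimitedUTF8StringBinaryTokenizerFactory;
import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizerFactory;
import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.ITokenFactory;
import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.UTF8WordTokenFactory;

public class TokenizerSketch {
    static IBinaryTokenizerFactory wordTokenizer() {
        // UTF8WordTokenFactory produces plain word tokens; the (true, false) flags
        // are taken as-is from the test above.
        ITokenFactory tokenFactory = new UTF8WordTokenFactory();
        return new DelimitedUTF8StringBinaryTokenizerFactory(true, false, tokenFactory);
    }
}
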
diff --git a/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreeOperatorsTest.java b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreeOperatorsTest.java
new file mode 100644
index 0000000..5f6eb3a
--- /dev/null
+++ b/hyracks-examples/hyracks-integration-tests/src/test/java/edu/uci/ics/hyracks/tests/rtree/RTreeOperatorsTest.java
@@ -0,0 +1,431 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.tests.rtree;
+
+import java.io.DataOutput;
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraintHelper;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.TypeTrait;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.DoubleBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.UTF8StringBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.DoubleParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.IValueParserFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.parsers.UTF8StringParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.ConstantFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.file.DelimitedDataTupleParserFactory;
+import edu.uci.ics.hyracks.dataflow.std.file.FileScanOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.FileSplit;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.dataflow.std.misc.ConstantTupleSourceOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.misc.PrinterOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeOpHelperFactory;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOpHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexStatsOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeOpHelperFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.dataflow.RTreeSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
+import edu.uci.ics.hyracks.test.support.TestTreeIndexRegistryProvider;
+import edu.uci.ics.hyracks.tests.integration.AbstractIntegrationTest;
+
+public class RTreeOperatorsTest extends AbstractIntegrationTest {
+ static {
+ TestStorageManagerComponentHolder.init(8192, 20, 20);
+ }
+
+ private IStorageManagerInterface storageManager = new TestStorageManagerInterface();
+ private IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider = new TestTreeIndexRegistryProvider();
+ private ITreeIndexOpHelperFactory opHelperFactory = new RTreeOpHelperFactory();
+ private ITreeIndexOpHelperFactory bTreeopHelperFactory = new BTreeOpHelperFactory();
+
+ private final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
+ private final static String sep = System.getProperty("file.separator");
+
+ // field, type and key declarations for primary R-tree index
+ private int primaryFieldCount = 5;
+ private int primaryKeyFieldCount = 4;
+ private ITypeTrait[] primaryTypeTraits = new ITypeTrait[primaryFieldCount];
+ private IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[primaryKeyFieldCount];
+
+ private RTreeTypeAwareTupleWriterFactory primaryTupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(
+ primaryTypeTraits);
+ private ITreeIndexFrameFactory primaryInteriorFrameFactory = new RTreeNSMInteriorFrameFactory(
+ primaryTupleWriterFactory, primaryKeyFieldCount);
+ private ITreeIndexFrameFactory primaryLeafFrameFactory = new RTreeNSMLeafFrameFactory(primaryTupleWriterFactory,
+ primaryKeyFieldCount);
+
+ private static String primaryRTreeName = "primary" + simpleDateFormat.format(new Date());
+ private static String primaryFileName = System.getProperty("java.io.tmpdir") + sep + primaryRTreeName;
+
+ private IFileSplitProvider primaryRTreeSplitProvider = new ConstantFileSplitProvider(
+ new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(primaryFileName))) });
+
+ private RecordDescriptor primaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE });
+
+ // field, type and key declarations for primary B-tree index
+ private int primaryBTreeFieldCount = 10;
+ private ITypeTrait[] primaryBTreeTypeTraits = new ITypeTrait[primaryBTreeFieldCount];
+ private int primaryBTreeKeyFieldCount = 1;
+ private IBinaryComparatorFactory[] primaryBTreeComparatorFactories = new IBinaryComparatorFactory[primaryBTreeKeyFieldCount];
+ private TypeAwareTupleWriterFactory primaryBTreeTupleWriterFactory = new TypeAwareTupleWriterFactory(
+ primaryBTreeTypeTraits);
+ private ITreeIndexFrameFactory primaryBTreeInteriorFrameFactory = new BTreeNSMInteriorFrameFactory(
+ primaryBTreeTupleWriterFactory);
+ private ITreeIndexFrameFactory primaryBTreeLeafFrameFactory = new BTreeNSMLeafFrameFactory(
+ primaryBTreeTupleWriterFactory);
+
+ private static String primaryBTreeName = "primaryBTree" + simpleDateFormat.format(new Date());
+ private static String primaryBTreeFileName = System.getProperty("java.io.tmpdir") + sep + primaryBTreeName;
+
+ private IFileSplitProvider primaryBTreeSplitProvider = new ConstantFileSplitProvider(
+ new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(primaryBTreeFileName))) });
+
+ private RecordDescriptor primaryBTreeRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+ UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE, });
+
+ // field, type and key declarations for secondary indexes
+ private int secondaryFieldCount = 5;
+ private ITypeTrait[] secondaryTypeTraits = new ITypeTrait[secondaryFieldCount];
+ private int secondaryKeyFieldCount = 4;
+ private IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[secondaryKeyFieldCount];
+ private RTreeTypeAwareTupleWriterFactory secondaryTupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(
+ secondaryTypeTraits);
+ private ITreeIndexFrameFactory secondaryInteriorFrameFactory = new RTreeNSMInteriorFrameFactory(
+ secondaryTupleWriterFactory, secondaryKeyFieldCount);
+ private ITreeIndexFrameFactory secondaryLeafFrameFactory = new RTreeNSMLeafFrameFactory(
+ secondaryTupleWriterFactory, secondaryKeyFieldCount);
+
+ private static String secondaryRTreeName = "secondary" + simpleDateFormat.format(new Date());
+ private static String secondaryFileName = System.getProperty("java.io.tmpdir") + sep + secondaryRTreeName;
+
+ private IFileSplitProvider secondaryRTreeSplitProvider = new ConstantFileSplitProvider(
+ new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(secondaryFileName))) });
+
+ private RecordDescriptor secondaryRecDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE });
+
+ @Before
+ public void setup() {
+ // field, type and key declarations for primary R-tree index
+ primaryTypeTraits[0] = new TypeTrait(8);
+ primaryTypeTraits[1] = new TypeTrait(8);
+ primaryTypeTraits[2] = new TypeTrait(8);
+ primaryTypeTraits[3] = new TypeTrait(8);
+ primaryTypeTraits[4] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+ primaryComparatorFactories[0] = DoubleBinaryComparatorFactory.INSTANCE;
+ primaryComparatorFactories[1] = primaryComparatorFactories[0];
+ primaryComparatorFactories[2] = primaryComparatorFactories[0];
+ primaryComparatorFactories[3] = primaryComparatorFactories[0];
+
+ // field, type and key declarations for primary B-tree index
+ primaryBTreeTypeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+ primaryBTreeTypeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+ primaryBTreeTypeTraits[2] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+ primaryBTreeTypeTraits[3] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+ primaryBTreeTypeTraits[4] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+ primaryBTreeTypeTraits[5] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+ primaryBTreeTypeTraits[6] = new TypeTrait(8);
+ primaryBTreeTypeTraits[7] = new TypeTrait(8);
+ primaryBTreeTypeTraits[8] = new TypeTrait(8);
+ primaryBTreeTypeTraits[9] = new TypeTrait(8);
+ primaryBTreeComparatorFactories[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
+
+ // field, type and key declarations for secondary indexes
+ secondaryTypeTraits[0] = new TypeTrait(8);
+ secondaryTypeTraits[1] = new TypeTrait(8);
+ secondaryTypeTraits[2] = new TypeTrait(8);
+ secondaryTypeTraits[3] = new TypeTrait(8);
+ secondaryTypeTraits[4] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+ secondaryComparatorFactories[0] = DoubleBinaryComparatorFactory.INSTANCE;
+ secondaryComparatorFactories[1] = secondaryComparatorFactories[0];
+ secondaryComparatorFactories[2] = secondaryComparatorFactories[0];
+ secondaryComparatorFactories[3] = secondaryComparatorFactories[0];
+ }
+
+ @Test
+ public void loadPrimaryBTreeIndexTest() throws Exception {
+ JobSpecification spec = new JobSpecification();
+
+ FileSplit[] ordersSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
+ "data/orders-with-locations.txt"))) };
+ IFileSplitProvider ordersSplitProvider = new ConstantFileSplitProvider(ordersSplits);
+ RecordDescriptor ordersDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+ UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE });
+
+ FileScanOperatorDescriptor ordScanner = new FileScanOperatorDescriptor(spec, ordersSplitProvider,
+ new DelimitedDataTupleParserFactory(new IValueParserFactory[] { UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE, UTF8StringParserFactory.INSTANCE,
+ DoubleParserFactory.INSTANCE, DoubleParserFactory.INSTANCE, DoubleParserFactory.INSTANCE,
+ DoubleParserFactory.INSTANCE }, '|'), ordersDesc);
+ PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, ordScanner, NC1_ID);
+
+ ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, 1000, new int[] { 0 },
+ new IBinaryComparatorFactory[] { UTF8StringBinaryComparatorFactory.INSTANCE }, ordersDesc);
+ PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, sorter, NC1_ID);
+
+ int[] fieldPermutation = { 0, 1, 2, 4, 5, 7, 9, 10, 11, 12 };
+ TreeIndexBulkLoadOperatorDescriptor primaryBTreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
+ storageManager, treeIndexRegistryProvider, primaryBTreeSplitProvider, primaryBTreeInteriorFrameFactory,
+ primaryBTreeLeafFrameFactory, primaryBTreeTypeTraits, primaryBTreeComparatorFactories,
+ fieldPermutation, 0.7f, bTreeopHelperFactory);
+ PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBTreeBulkLoad, NC1_ID);
+
+ spec.connect(new OneToOneConnectorDescriptor(spec), ordScanner, 0, sorter, 0);
+
+ spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, primaryBTreeBulkLoad, 0);
+
+ spec.addRoot(primaryBTreeBulkLoad);
+ runTest(spec);
+ }
+
+ @Test
+ public void loadPrimaryIndexTest() throws Exception {
+ JobSpecification spec = new JobSpecification();
+
+ FileSplit[] objectsSplits = new FileSplit[] { new FileSplit(NC1_ID, new FileReference(new File(
+ "data/spatial.txt"))) };
+ IFileSplitProvider objectsSplitProvider = new ConstantFileSplitProvider(objectsSplits);
+ RecordDescriptor objectsDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE });
+
+ FileScanOperatorDescriptor objScanner = new FileScanOperatorDescriptor(spec, objectsSplitProvider,
+ new DelimitedDataTupleParserFactory(new IValueParserFactory[] { DoubleParserFactory.INSTANCE,
+ DoubleParserFactory.INSTANCE, DoubleParserFactory.INSTANCE, DoubleParserFactory.INSTANCE,
+ UTF8StringParserFactory.INSTANCE }, '|'), objectsDesc);
+ PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, objScanner, NC1_ID);
+
+ int[] fieldPermutation = { 0, 1, 2, 3, 4 };
+ TreeIndexBulkLoadOperatorDescriptor primaryRTreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
+ storageManager, treeIndexRegistryProvider, primaryRTreeSplitProvider, primaryInteriorFrameFactory,
+ primaryLeafFrameFactory, primaryTypeTraits, primaryComparatorFactories, fieldPermutation, 0.7f,
+ opHelperFactory);
+ PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryRTreeBulkLoad, NC1_ID);
+
+ spec.connect(new OneToOneConnectorDescriptor(spec), objScanner, 0, primaryRTreeBulkLoad, 0);
+
+ spec.addRoot(primaryRTreeBulkLoad);
+ runTest(spec);
+ }
+
+ @Test
+ public void loadSecondaryIndexTest() throws Exception {
+ JobSpecification spec = new JobSpecification();
+
+        // build a dummy key tuple; its content is ignored (low/high key fields are
+        // null below), it only triggers a full scan of the primary B-tree
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryKeyFieldCount * 2);
+ DataOutput dos = tb.getDataOutput();
+
+ tb.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize("0", dos);
+ tb.addFieldEndOffset();
+
+ ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE };
+ RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+
+ ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
+ keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+ PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
+
+ int[] lowKeyFields = null; // - infinity
+ int[] highKeyFields = null; // + infinity
+
+ // scan primary index
+ BTreeSearchOperatorDescriptor primaryBTreeSearchOp = new BTreeSearchOperatorDescriptor(spec,
+ primaryBTreeRecDesc, storageManager, treeIndexRegistryProvider, primaryBTreeSplitProvider,
+ primaryBTreeInteriorFrameFactory, primaryBTreeLeafFrameFactory, primaryBTreeTypeTraits,
+                primaryBTreeComparatorFactories, true, lowKeyFields, highKeyFields, true, true, bTreeopHelperFactory);
+ PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryBTreeSearchOp, NC1_ID);
+
+ // load secondary index
+ int[] fieldPermutation = { 6, 7, 8, 9, 0 };
+ TreeIndexBulkLoadOperatorDescriptor secondaryRTreeBulkLoad = new TreeIndexBulkLoadOperatorDescriptor(spec,
+ storageManager, treeIndexRegistryProvider, secondaryRTreeSplitProvider, secondaryInteriorFrameFactory,
+ secondaryLeafFrameFactory, secondaryTypeTraits, secondaryComparatorFactories, fieldPermutation, 0.7f,
+ opHelperFactory);
+ PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryRTreeBulkLoad, NC1_ID);
+
+ spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryBTreeSearchOp, 0);
+ spec.connect(new OneToOneConnectorDescriptor(spec), primaryBTreeSearchOp, 0, secondaryRTreeBulkLoad, 0);
+
+ spec.addRoot(secondaryRTreeBulkLoad);
+ runTest(spec);
+ }
+
+ @Test
+ public void showPrimaryIndexStats() throws Exception {
+ JobSpecification spec = new JobSpecification();
+
+ TreeIndexStatsOperatorDescriptor primaryStatsOp = new TreeIndexStatsOperatorDescriptor(spec, storageManager,
+ treeIndexRegistryProvider, primaryRTreeSplitProvider, primaryInteriorFrameFactory,
+ primaryLeafFrameFactory, primaryTypeTraits, primaryComparatorFactories, opHelperFactory);
+ PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryStatsOp, NC1_ID);
+
+ spec.addRoot(primaryStatsOp);
+ runTest(spec);
+ }
+
+ @Test
+ public void searchPrimaryIndexTest() throws Exception {
+ JobSpecification spec = new JobSpecification();
+
+ // build tuple
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(primaryKeyFieldCount);
+ DataOutput dos = tb.getDataOutput();
+
+ tb.reset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(61.2894, dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(-149.624, dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(61.8894, dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(-149.024, dos);
+ tb.addFieldEndOffset();
+
+ ISerializerDeserializer[] keyRecDescSers = { DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE };
+ RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+
+ ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
+ keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+ PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
+
+ int[] keyFields = { 0, 1, 2, 3 };
+
+ RTreeSearchOperatorDescriptor primaryRTreeSearchOp = new RTreeSearchOperatorDescriptor(spec, primaryRecDesc,
+ storageManager, treeIndexRegistryProvider, primaryRTreeSplitProvider, primaryInteriorFrameFactory,
+ primaryLeafFrameFactory, primaryTypeTraits, primaryComparatorFactories, keyFields, opHelperFactory);
+ PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, primaryRTreeSearchOp, NC1_ID);
+
+ PrinterOperatorDescriptor printer = new PrinterOperatorDescriptor(spec);
+ PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+ spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, primaryRTreeSearchOp, 0);
+ spec.connect(new OneToOneConnectorDescriptor(spec), primaryRTreeSearchOp, 0, printer, 0);
+
+ spec.addRoot(printer);
+ runTest(spec);
+ }
+
+ @Test
+ public void searchSecondaryIndexTest() throws Exception {
+ JobSpecification spec = new JobSpecification();
+
+ // build tuple
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(secondaryKeyFieldCount);
+ DataOutput dos = tb.getDataOutput();
+
+ tb.reset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(61.2894, dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(-149.624, dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(61.8894, dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(-149.024, dos);
+ tb.addFieldEndOffset();
+
+ ISerializerDeserializer[] keyRecDescSers = { DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE };
+ RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+
+ ConstantTupleSourceOperatorDescriptor keyProviderOp = new ConstantTupleSourceOperatorDescriptor(spec,
+ keyRecDesc, tb.getFieldEndOffsets(), tb.getByteArray(), tb.getSize());
+ PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, keyProviderOp, NC1_ID);
+
+ int[] keyFields = { 0, 1, 2, 3 };
+
+ RTreeSearchOperatorDescriptor secondaryRTreeSearchOp = new RTreeSearchOperatorDescriptor(spec,
+ secondaryRecDesc, storageManager, treeIndexRegistryProvider, secondaryRTreeSplitProvider,
+ secondaryInteriorFrameFactory, secondaryLeafFrameFactory, secondaryTypeTraits,
+ secondaryComparatorFactories, keyFields, opHelperFactory);
+ PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, secondaryRTreeSearchOp, NC1_ID);
+
+ PrinterOperatorDescriptor printer = new PrinterOperatorDescriptor(spec);
+ PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, printer, NC1_ID);
+
+ spec.connect(new OneToOneConnectorDescriptor(spec), keyProviderOp, 0, secondaryRTreeSearchOp, 0);
+ spec.connect(new OneToOneConnectorDescriptor(spec), secondaryRTreeSearchOp, 0, printer, 0);
+
+ spec.addRoot(printer);
+ runTest(spec);
+ }
+
+ @AfterClass
+ public static void cleanup() throws Exception {
+ File primary = new File(primaryFileName);
+ primary.deleteOnExit();
+
+ File primaryBTree = new File(primaryBTreeFileName);
+ primaryBTree.deleteOnExit();
+
+ File secondary = new File(secondaryFileName);
+ secondary.deleteOnExit();
+ }
+}
\ No newline at end of file
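
The new RTreeOperatorsTest drives both R-tree searches with a key tuple of four doubles describing the query rectangle. A minimal sketch of that key construction, using the same ArrayTupleBuilder calls as the tests above (the method and parameter names are illustrative, and the coordinate ordering simply mirrors the test):

// Sketch only: builds a 4-double search-key tuple the way the R-tree tests do.
import java.io.DataOutput;

import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;

public class RTreeKeySketch {
    static ArrayTupleBuilder buildSearchKey(double loX, double loY, double hiX, double hiY)
            throws Exception {
        ArrayTupleBuilder tb = new ArrayTupleBuilder(4); // one field per rectangle coordinate
        DataOutput dos = tb.getDataOutput();
        tb.reset();
        for (double d : new double[] { loX, loY, hiX, hiY }) {
            DoubleSerializerDeserializer.INSTANCE.serialize(d, dos);
            tb.addFieldEndOffset();
        }
        // The builder's field offsets, byte array and size then feed a
        // ConstantTupleSourceOperatorDescriptor, exactly as in the tests above.
        return tb;
    }
}
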
diff --git a/hyracks-examples/text-example/textclient/.settings/org.eclipse.jdt.core.prefs b/hyracks-examples/text-example/textclient/.settings/org.eclipse.jdt.core.prefs
index 8599738..dfac000 100644
--- a/hyracks-examples/text-example/textclient/.settings/org.eclipse.jdt.core.prefs
+++ b/hyracks-examples/text-example/textclient/.settings/org.eclipse.jdt.core.prefs
@@ -1,4 +1,4 @@
-#Tue Sep 28 14:37:42 PDT 2010
+#Fri May 20 19:34:05 PDT 2011
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.compliance=1.6
diff --git a/hyracks-examples/text-example/texthelper/.settings/org.eclipse.jdt.core.prefs b/hyracks-examples/text-example/texthelper/.settings/org.eclipse.jdt.core.prefs
index 8599738..dfac000 100644
--- a/hyracks-examples/text-example/texthelper/.settings/org.eclipse.jdt.core.prefs
+++ b/hyracks-examples/text-example/texthelper/.settings/org.eclipse.jdt.core.prefs
@@ -1,4 +1,4 @@
-#Tue Sep 28 14:37:42 PDT 2010
+#Fri May 20 19:34:05 PDT 2011
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.compliance=1.6
diff --git a/hyracks-examples/tpch-example/tpchapp/.classpath b/hyracks-examples/tpch-example/tpchapp/.classpath
index 3f62785..d0bec0f 100644
--- a/hyracks-examples/tpch-example/tpchapp/.classpath
+++ b/hyracks-examples/tpch-example/tpchapp/.classpath
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
- <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/J2SE-1.4"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/J2SE-1.5"/>
<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
<classpathentry kind="output" path="target/classes"/>
</classpath>
diff --git a/hyracks-examples/tpch-example/tpchapp/.settings/org.eclipse.jdt.core.prefs b/hyracks-examples/tpch-example/tpchapp/.settings/org.eclipse.jdt.core.prefs
index 14ece6a..b61b886 100644
--- a/hyracks-examples/tpch-example/tpchapp/.settings/org.eclipse.jdt.core.prefs
+++ b/hyracks-examples/tpch-example/tpchapp/.settings/org.eclipse.jdt.core.prefs
@@ -1,6 +1,6 @@
-#Sun Aug 29 19:38:13 PDT 2010
+#Thu Aug 04 11:50:35 PDT 2011
eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.4
-org.eclipse.jdt.core.compiler.compliance=1.4
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.5
+org.eclipse.jdt.core.compiler.compliance=1.5
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.4
+org.eclipse.jdt.core.compiler.source=1.5
diff --git a/hyracks-examples/tpch-example/tpchclient/.settings/org.eclipse.jdt.core.prefs b/hyracks-examples/tpch-example/tpchclient/.settings/org.eclipse.jdt.core.prefs
index 2f0f2e3..375e12e 100644
--- a/hyracks-examples/tpch-example/tpchclient/.settings/org.eclipse.jdt.core.prefs
+++ b/hyracks-examples/tpch-example/tpchclient/.settings/org.eclipse.jdt.core.prefs
@@ -1,4 +1,4 @@
-#Wed Aug 11 19:09:15 PDT 2010
+#Fri May 20 19:34:07 PDT 2011
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.compliance=1.6
diff --git a/hyracks-hadoop-compat/.settings/org.eclipse.jdt.core.prefs b/hyracks-hadoop-compat/.settings/org.eclipse.jdt.core.prefs
index 0b1e408..dfac000 100644
--- a/hyracks-hadoop-compat/.settings/org.eclipse.jdt.core.prefs
+++ b/hyracks-hadoop-compat/.settings/org.eclipse.jdt.core.prefs
@@ -1,4 +1,4 @@
-#Tue Nov 02 17:09:03 PDT 2010
+#Fri May 20 19:34:05 PDT 2011
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.compliance=1.6
diff --git a/hyracks-server/.classpath b/hyracks-server/.classpath
index 3f62785..d0bec0f 100644
--- a/hyracks-server/.classpath
+++ b/hyracks-server/.classpath
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
- <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/J2SE-1.4"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/J2SE-1.5"/>
<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
<classpathentry kind="output" path="target/classes"/>
</classpath>
diff --git a/hyracks-server/.settings/org.eclipse.jdt.core.prefs b/hyracks-server/.settings/org.eclipse.jdt.core.prefs
index 14ece6a..b61b886 100644
--- a/hyracks-server/.settings/org.eclipse.jdt.core.prefs
+++ b/hyracks-server/.settings/org.eclipse.jdt.core.prefs
@@ -1,6 +1,6 @@
-#Sun Aug 29 19:38:13 PDT 2010
+#Thu Aug 04 11:50:35 PDT 2011
eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.4
-org.eclipse.jdt.core.compiler.compliance=1.4
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.5
+org.eclipse.jdt.core.compiler.compliance=1.5
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.4
+org.eclipse.jdt.core.compiler.source=1.5
diff --git a/hyracks-storage-am-btree/.settings/org.eclipse.jdt.core.prefs b/hyracks-storage-am-btree/.settings/org.eclipse.jdt.core.prefs
index 0eb07fa..451c926 100644
--- a/hyracks-storage-am-btree/.settings/org.eclipse.jdt.core.prefs
+++ b/hyracks-storage-am-btree/.settings/org.eclipse.jdt.core.prefs
@@ -1,6 +1,264 @@
-#Mon Aug 30 11:42:12 PDT 2010
+#Fri May 20 19:34:05 PDT 2011
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.compliance=1.6
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
org.eclipse.jdt.core.compiler.source=1.6
+org.eclipse.jdt.core.formatter.align_type_members_on_columns=false
+org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression=16
+org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant=48
+org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call=16
+org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation=16
+org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression=16
+org.eclipse.jdt.core.formatter.alignment_for_assignment=0
+org.eclipse.jdt.core.formatter.alignment_for_binary_expression=16
+org.eclipse.jdt.core.formatter.alignment_for_compact_if=16
+org.eclipse.jdt.core.formatter.alignment_for_conditional_expression=80
+org.eclipse.jdt.core.formatter.alignment_for_enum_constants=48
+org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer=16
+org.eclipse.jdt.core.formatter.alignment_for_multiple_fields=16
+org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation=16
+org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration=16
+org.eclipse.jdt.core.formatter.blank_lines_after_imports=1
+org.eclipse.jdt.core.formatter.blank_lines_after_package=1
+org.eclipse.jdt.core.formatter.blank_lines_before_field=0
+org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration=0
+org.eclipse.jdt.core.formatter.blank_lines_before_imports=1
+org.eclipse.jdt.core.formatter.blank_lines_before_member_type=1
+org.eclipse.jdt.core.formatter.blank_lines_before_method=1
+org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk=1
+org.eclipse.jdt.core.formatter.blank_lines_before_package=0
+org.eclipse.jdt.core.formatter.blank_lines_between_import_groups=1
+org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations=1
+org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_array_initializer=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_block=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_block_in_case=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_enum_constant=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_method_declaration=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_switch=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_type_declaration=end_of_line
+org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment=false
+org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment=false
+org.eclipse.jdt.core.formatter.comment.format_block_comments=true
+org.eclipse.jdt.core.formatter.comment.format_header=false
+org.eclipse.jdt.core.formatter.comment.format_html=true
+org.eclipse.jdt.core.formatter.comment.format_javadoc_comments=true
+org.eclipse.jdt.core.formatter.comment.format_line_comments=true
+org.eclipse.jdt.core.formatter.comment.format_source_code=true
+org.eclipse.jdt.core.formatter.comment.indent_parameter_description=true
+org.eclipse.jdt.core.formatter.comment.indent_root_tags=true
+org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags=insert
+org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter=insert
+org.eclipse.jdt.core.formatter.comment.line_length=80
+org.eclipse.jdt.core.formatter.compact_else_if=true
+org.eclipse.jdt.core.formatter.continuation_indentation=2
+org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer=2
+org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line=false
+org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header=true
+org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header=true
+org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header=true
+org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header=true
+org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases=true
+org.eclipse.jdt.core.formatter.indent_empty_lines=false
+org.eclipse.jdt.core.formatter.indent_statements_compare_to_block=true
+org.eclipse.jdt.core.formatter.indent_statements_compare_to_body=true
+org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases=true
+org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch=true
+org.eclipse.jdt.core.formatter.indentation.size=4
+org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable=insert
+org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_member=insert
+org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration=insert
+org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration=insert
+org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block=insert
+org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant=insert
+org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration=insert
+org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body=insert
+org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter=insert
+org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator=insert
+org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_binary_operator=insert
+org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments=insert
+org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters=insert
+org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block=insert
+org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast=insert
+org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert=insert
+org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case=insert
+org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional=insert
+org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for=insert
+org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters=insert
+org.eclipse.jdt.core.formatter.insert_space_after_ellipsis=insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer=insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional=insert
+org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for=insert
+org.eclipse.jdt.core.formatter.insert_space_after_unary_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter=insert
+org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator=insert
+org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_binary_operator=insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer=insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert=insert
+org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional=insert
+org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for=insert
+org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_ellipsis=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while=insert
+org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return=insert
+org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw=insert
+org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional=insert
+org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_semicolon=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_unary_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation=do not insert
+org.eclipse.jdt.core.formatter.join_lines_in_comments=true
+org.eclipse.jdt.core.formatter.join_wrapped_lines=true
+org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line=false
+org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line=false
+org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line=false
+org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line=false
+org.eclipse.jdt.core.formatter.lineSplit=120
+org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column=false
+org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column=false
+org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body=0
+org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve=1
+org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line=true
+org.eclipse.jdt.core.formatter.tabulation.char=space
+org.eclipse.jdt.core.formatter.tabulation.size=4
+org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations=false
+org.eclipse.jdt.core.formatter.wrap_before_binary_operator=true
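For reference, the formatter profile added above boils down to: 4-space indentation (tabulation.char=space, tabulation.size=4), a 120-column line limit (lineSplit=120), spaces after commas and around binary/assignment operators, a space before opening braces, and no line break before else. The snippet below is a hand-written sample of roughly what these options should produce (it was not generated by Eclipse, so treat it as an approximation of the style):

public class FormatterSample {
    public int clamp(int value, int lo, int hi) {
        // spaces after commas and around binary operators; brace on the same line
        if (value < lo) {
            return lo;
        } else if (value > hi) {
            // no new line before else, per insert_new_line_before_else_in_if_statement
            return hi;
        }
        return value;
    }
}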
diff --git a/hyracks-storage-am-btree/pom.xml b/hyracks-storage-am-btree/pom.xml
index c51ce70..0ef06c0 100644
--- a/hyracks-storage-am-btree/pom.xml
+++ b/hyracks-storage-am-btree/pom.xml
@@ -31,6 +31,13 @@
<type>jar</type>
<scope>compile</scope>
</dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-storage-am-common</artifactId>
+ <version>0.1.7-SNAPSHOT</version>
+ <type>jar</type>
+ <scope>compile</scope>
+ </dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-dataflow-common</artifactId>
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeInteriorFrame.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeInteriorFrame.java
index 675bee3..25ab167 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeInteriorFrame.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeInteriorFrame.java
@@ -15,10 +15,15 @@
package edu.uci.ics.hyracks.storage.am.btree.api;
-import edu.uci.ics.hyracks.storage.am.btree.impls.MultiComparator;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-public interface IBTreeInteriorFrame extends IBTreeFrame {
+public interface IBTreeInteriorFrame extends ITreeIndexFrame {
+ public void insertSorted(ITupleReference tuple, MultiComparator cmp) throws HyracksDataException;
+
public int getChildPageId(RangePredicate pred, MultiComparator srcCmp);
public int getLeftmostChildPageId(MultiComparator cmp);
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeInteriorFrameFactory.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeInteriorFrameFactory.java
deleted file mode 100644
index bddd636..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeInteriorFrameFactory.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.api;
-
-import java.io.Serializable;
-
-public interface IBTreeInteriorFrameFactory extends Serializable {
- public IBTreeInteriorFrame getFrame();
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeLeafFrame.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeLeafFrame.java
index 9eb449c..53d892e 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeLeafFrame.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeLeafFrame.java
@@ -15,12 +15,17 @@
package edu.uci.ics.hyracks.storage.am.btree.api;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.impls.FindTupleMode;
-import edu.uci.ics.hyracks.storage.am.btree.impls.FindTupleNoExactMatchPolicy;
-import edu.uci.ics.hyracks.storage.am.btree.impls.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-public interface IBTreeLeafFrame extends IBTreeFrame {
+public interface IBTreeLeafFrame extends ITreeIndexFrame {
+ public void insertSorted(ITupleReference tuple, MultiComparator cmp) throws HyracksDataException;
+
public void setNextLeaf(int nextPage);
public int getNextLeaf();
@@ -29,8 +34,8 @@
public int getPrevLeaf();
- public IBTreeTupleReference createTupleReference();
+ public ITreeIndexTupleReference createTupleReference();
- public int findTupleIndex(ITupleReference searchKey, IBTreeTupleReference pageTuple, MultiComparator cmp,
+ public int findTupleIndex(ITupleReference searchKey, ITreeIndexTupleReference pageTuple, MultiComparator cmp,
FindTupleMode ftm, FindTupleNoExactMatchPolicy ftp);
}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeTupleWriterFactory.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeTupleWriterFactory.java
deleted file mode 100644
index 645c430..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeTupleWriterFactory.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.api;
-
-import java.io.Serializable;
-
-public interface IBTreeTupleWriterFactory extends Serializable {
- public IBTreeTupleWriter createTupleWriter();
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IFrameCompressor.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IFrameCompressor.java
index 51e67ca..8e1d0a2 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IFrameCompressor.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IFrameCompressor.java
@@ -15,9 +15,9 @@
package edu.uci.ics.hyracks.storage.am.btree.api;
-import edu.uci.ics.hyracks.storage.am.btree.frames.FieldPrefixNSMLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.impls.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
public interface IFrameCompressor {
- public boolean compress(FieldPrefixNSMLeafFrame frame, MultiComparator cmp) throws Exception;
+ public boolean compress(BTreeFieldPrefixNSMLeafFrame frame, MultiComparator cmp) throws Exception;
}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IPrefixSlotManager.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IPrefixSlotManager.java
index f45406f..d01db11 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IPrefixSlotManager.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IPrefixSlotManager.java
@@ -16,10 +16,11 @@
package edu.uci.ics.hyracks.storage.am.btree.api;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.frames.FieldPrefixNSMLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.impls.FindTupleMode;
-import edu.uci.ics.hyracks.storage.am.btree.impls.FindTupleNoExactMatchPolicy;
-import edu.uci.ics.hyracks.storage.am.btree.impls.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
// a slot consists of two fields:
// first field is 1 byte, it indicates the slot number of a prefix tuple
@@ -42,7 +43,7 @@
// all prefixes are recomputed during a reorg or compaction
public interface IPrefixSlotManager {
- public void setFrame(FieldPrefixNSMLeafFrame frame);
+ public void setFrame(BTreeFieldPrefixNSMLeafFrame frame);
public int decodeFirstSlotField(int slot);
@@ -50,14 +51,14 @@
public int encodeSlotFields(int firstField, int secondField);
- public int findSlot(ITupleReference searchKey, IBTreeTupleReference frameTuple,
- IBTreeTupleReference framePrefixTuple, MultiComparator multiCmp, FindTupleMode mode,
+ public int findSlot(ITupleReference searchKey, ITreeIndexTupleReference frameTuple,
+ ITreeIndexTupleReference framePrefixTuple, MultiComparator multiCmp, FindTupleMode mode,
FindTupleNoExactMatchPolicy matchPolicy);
public int insertSlot(int slot, int tupleOff);
// returns prefix slot number, returns TUPLE_UNCOMPRESSED if none found
- public int findPrefix(ITupleReference tuple, IBTreeTupleReference framePrefixTuple, MultiComparator multiCmp);
+ public int findPrefix(ITupleReference tuple, ITreeIndexTupleReference framePrefixTuple, MultiComparator multiCmp);
public int getTupleSlotStartOff();
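The slot layout described in the comments above (a 1-byte first field naming the prefix slot, with the rest of the int left for the second field) is easiest to see as a bit-packing exercise. The sketch below only illustrates the encode/decode contract of IPrefixSlotManager; it is not the actual FieldPrefixSlotManager code, the exact byte positions are an assumption, and decodeSecondSlotField is added here purely for symmetry:

public class PrefixSlotSketch {
    // Assumed layout: high byte = prefix-slot field, low 3 bytes = second field.
    private static final int FIRST_FIELD_MASK = 0xFF;
    private static final int SECOND_FIELD_MASK = 0x00FFFFFF;

    public static int encodeSlotFields(int firstField, int secondField) {
        return ((firstField & FIRST_FIELD_MASK) << 24) | (secondField & SECOND_FIELD_MASK);
    }

    public static int decodeFirstSlotField(int slot) {
        return (slot >>> 24) & FIRST_FIELD_MASK;
    }

    public static int decodeSecondSlotField(int slot) {
        return slot & SECOND_FIELD_MASK;
    }

    public static void main(String[] args) {
        int slot = encodeSlotFields(3, 1024);            // prefix slot 3, second field 1024
        System.out.println(decodeFirstSlotField(slot));  // 3
        System.out.println(decodeSecondSlotField(slot)); // 1024
    }
}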
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/compressors/FieldPrefixCompressor.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/compressors/FieldPrefixCompressor.java
index eb6c0ab..1292ff2 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/compressors/FieldPrefixCompressor.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/compressors/FieldPrefixCompressor.java
@@ -24,11 +24,11 @@
import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
import edu.uci.ics.hyracks.storage.am.btree.api.IFrameCompressor;
import edu.uci.ics.hyracks.storage.am.btree.api.IPrefixSlotManager;
-import edu.uci.ics.hyracks.storage.am.btree.frames.FieldPrefixNSMLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame;
import edu.uci.ics.hyracks.storage.am.btree.impls.FieldPrefixSlotManager;
import edu.uci.ics.hyracks.storage.am.btree.impls.FieldPrefixTupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.impls.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.btree.tuples.TypeAwareTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriter;
public class FieldPrefixCompressor implements IFrameCompressor {
@@ -49,7 +49,7 @@
}
@Override
- public boolean compress(FieldPrefixNSMLeafFrame frame, MultiComparator cmp) throws Exception {
+ public boolean compress(BTreeFieldPrefixNSMLeafFrame frame, MultiComparator cmp) throws Exception {
int tupleCount = frame.getTupleCount();
if (tupleCount <= 0) {
frame.setPrefixTupleCount(0);
@@ -365,7 +365,7 @@
// the prefix length may be different for different keypartitions
// the occurrenceThreshold determines the minimum number of tuples that must
// share a common prefix in order for us to consider compressing them
- private ArrayList<KeyPartition> getKeyPartitions(FieldPrefixNSMLeafFrame frame, MultiComparator cmp,
+ private ArrayList<KeyPartition> getKeyPartitions(BTreeFieldPrefixNSMLeafFrame frame, MultiComparator cmp,
int occurrenceThreshold) {
IBinaryComparator[] cmps = cmp.getComparators();
int fieldCount = cmp.getKeyFieldCount();
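The occurrenceThreshold mentioned in the comment above is the gating condition for the key-partitioning step: a run of consecutive tuples is only worth prefix-compressing if enough of them share a common prefix. The following is a simplified, self-contained illustration of that idea; it operates on plain sorted strings rather than frame tuples and a MultiComparator, so none of these names or types come from the real compressor:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class PrefixThresholdSketch {
    // Return [start, end) ranges of consecutive keys sharing the same first field
    // that are long enough (>= occurrenceThreshold) to be candidates for compression.
    public static List<int[]> compressibleRuns(List<String> sortedFirstFields, int occurrenceThreshold) {
        List<int[]> runs = new ArrayList<>();
        int start = 0;
        for (int i = 1; i <= sortedFirstFields.size(); i++) {
            boolean boundary = i == sortedFirstFields.size()
                    || !sortedFirstFields.get(i).equals(sortedFirstFields.get(start));
            if (boundary) {
                if (i - start >= occurrenceThreshold) {
                    runs.add(new int[] { start, i });
                }
                start = i;
            }
        }
        return runs;
    }

    public static void main(String[] args) {
        List<String> keys = Arrays.asList("ann", "ann", "ann", "bob", "cat", "cat");
        // Only the three-tuple "ann" run meets a threshold of 3: prints [[0, 3]]
        System.out.println(Arrays.deepToString(compressibleRuns(keys, 3).toArray()));
    }
}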
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/AbstractBTreeOperatorDescriptor.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/AbstractBTreeOperatorDescriptor.java
deleted file mode 100644
index 35e6951..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/AbstractBTreeOperatorDescriptor.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.dataflow;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-
-public abstract class AbstractBTreeOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
-
- private static final long serialVersionUID = 1L;
-
- protected final IFileSplitProvider fileSplitProvider;
-
- protected final IBinaryComparatorFactory[] comparatorFactories;
-
- protected final IBTreeInteriorFrameFactory interiorFrameFactory;
- protected final IBTreeLeafFrameFactory leafFrameFactory;
-
- protected final IStorageManagerInterface storageManager;
- protected final IBTreeRegistryProvider btreeRegistryProvider;
-
- protected final ITypeTrait[] typeTraits;
-
- public AbstractBTreeOperatorDescriptor(JobSpecification spec, int inputArity, int outputArity,
- RecordDescriptor recDesc, IStorageManagerInterface storageManager,
- IBTreeRegistryProvider btreeRegistryProvider, IFileSplitProvider fileSplitProvider,
- IBTreeInteriorFrameFactory interiorFactory, IBTreeLeafFrameFactory leafFactory, ITypeTrait[] typeTraits,
- IBinaryComparatorFactory[] comparatorFactories) {
- super(spec, inputArity, outputArity);
- this.fileSplitProvider = fileSplitProvider;
- this.storageManager = storageManager;
- this.btreeRegistryProvider = btreeRegistryProvider;
- this.interiorFrameFactory = interiorFactory;
- this.leafFrameFactory = leafFactory;
- this.typeTraits = typeTraits;
- this.comparatorFactories = comparatorFactories;
- if (outputArity > 0)
- recordDescriptors[0] = recDesc;
- }
-
- public IFileSplitProvider getFileSplitProvider() {
- return fileSplitProvider;
- }
-
- public IBinaryComparatorFactory[] getComparatorFactories() {
- return comparatorFactories;
- }
-
- public ITypeTrait[] getTypeTraits() {
- return typeTraits;
- }
-
- public IBTreeInteriorFrameFactory getInteriorFactory() {
- return interiorFrameFactory;
- }
-
- public IBTreeLeafFrameFactory getLeafFactory() {
- return leafFrameFactory;
- }
-
- public IStorageManagerInterface getStorageManager() {
- return storageManager;
- }
-
- public IBTreeRegistryProvider getBtreeRegistryProvider() {
- return btreeRegistryProvider;
- }
-
- public RecordDescriptor getRecordDescriptor() {
- return recordDescriptors[0];
- }
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeBulkLoadOperatorNodePushable.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeBulkLoadOperatorNodePushable.java
deleted file mode 100644
index 6e13ed7..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeBulkLoadOperatorNodePushable.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.am.btree.dataflow;
-
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeMetaDataFrame;
-import edu.uci.ics.hyracks.storage.am.btree.frames.MetaDataFrame;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-
-public class BTreeBulkLoadOperatorNodePushable extends
- AbstractUnaryInputSinkOperatorNodePushable {
- private float fillFactor;
- private final BTreeOpHelper btreeOpHelper;
- private FrameTupleAccessor accessor;
- private BTree.BulkLoadContext bulkLoadCtx;
-
- private IRecordDescriptorProvider recordDescProvider;
-
- private PermutingFrameTupleReference tuple = new PermutingFrameTupleReference();
-
- public BTreeBulkLoadOperatorNodePushable(
- AbstractBTreeOperatorDescriptor opDesc,
- IHyracksStageletContext ctx, int partition, int[] fieldPermutation,
- float fillFactor, IRecordDescriptorProvider recordDescProvider) {
- btreeOpHelper = new BTreeOpHelper(opDesc, ctx, partition,
- BTreeOpHelper.BTreeMode.CREATE_BTREE);
- this.fillFactor = fillFactor;
- this.recordDescProvider = recordDescProvider;
- tuple.setFieldPermutation(fieldPermutation);
- }
-
- @Override
- public void open() throws HyracksDataException {
- AbstractBTreeOperatorDescriptor opDesc = btreeOpHelper
- .getOperatorDescriptor();
- RecordDescriptor recDesc = recordDescProvider.getInputRecordDescriptor(
- opDesc.getOperatorId(), 0);
- accessor = new FrameTupleAccessor(btreeOpHelper
- .getHyracksStageletContext().getFrameSize(), recDesc);
- IBTreeMetaDataFrame metaFrame = new MetaDataFrame();
- try {
- btreeOpHelper.init();
- btreeOpHelper.getBTree().open(btreeOpHelper.getBTreeFileId());
- bulkLoadCtx = btreeOpHelper.getBTree().beginBulkLoad(fillFactor,
- btreeOpHelper.getLeafFrame(),
- btreeOpHelper.getInteriorFrame(), metaFrame);
- } catch (Exception e) {
- // cleanup in case of failure
- btreeOpHelper.deinit();
- throw new HyracksDataException(e);
- }
- }
-
- @Override
- public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
- accessor.reset(buffer);
- int tupleCount = accessor.getTupleCount();
- for (int i = 0; i < tupleCount; i++) {
- tuple.reset(accessor, i);
- btreeOpHelper.getBTree().bulkLoadAddTuple(bulkLoadCtx, tuple);
- }
- }
-
- @Override
- public void close() throws HyracksDataException {
- try {
- btreeOpHelper.getBTree().endBulkLoad(bulkLoadCtx);
- } finally {
- btreeOpHelper.deinit();
- }
- }
-
- @Override
- public void flush() throws HyracksDataException {
- }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDiskOrderScanOperatorDescriptor.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDiskOrderScanOperatorDescriptor.java
deleted file mode 100644
index 17fbf97..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDiskOrderScanOperatorDescriptor.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.job.IOperatorEnvironment;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-
-public class BTreeDiskOrderScanOperatorDescriptor extends AbstractBTreeOperatorDescriptor {
-
- private static final long serialVersionUID = 1L;
-
- public BTreeDiskOrderScanOperatorDescriptor(JobSpecification spec, RecordDescriptor recDesc,
- IStorageManagerInterface storageManager, IBTreeRegistryProvider btreeRegistryProvider,
- IFileSplitProvider fileSplitProvider, IBTreeInteriorFrameFactory interiorFactory,
- IBTreeLeafFrameFactory leafFactory, ITypeTrait[] typeTraits) {
- super(spec, 0, 1, recDesc, storageManager, btreeRegistryProvider, fileSplitProvider, interiorFactory,
- leafFactory, typeTraits, null);
- }
-
- @Override
- public IOperatorNodePushable createPushRuntime(IHyracksStageletContext ctx, IOperatorEnvironment env,
- IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
- return new BTreeDiskOrderScanOperatorNodePushable(this, ctx, partition);
- }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDiskOrderScanOperatorNodePushable.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDiskOrderScanOperatorNodePushable.java
deleted file mode 100644
index c362183..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDiskOrderScanOperatorNodePushable.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.am.btree.dataflow;
-
-import java.io.DataOutput;
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeMetaDataFrame;
-import edu.uci.ics.hyracks.storage.am.btree.frames.MetaDataFrame;
-import edu.uci.ics.hyracks.storage.am.btree.impls.DiskOrderScanCursor;
-import edu.uci.ics.hyracks.storage.am.btree.impls.MultiComparator;
-
-public class BTreeDiskOrderScanOperatorNodePushable extends AbstractUnaryOutputSourceOperatorNodePushable {
- private final BTreeOpHelper btreeOpHelper;
-
- public BTreeDiskOrderScanOperatorNodePushable(AbstractBTreeOperatorDescriptor opDesc, IHyracksStageletContext ctx,
- int partition) {
- btreeOpHelper = new BTreeOpHelper(opDesc, ctx, partition, BTreeOpHelper.BTreeMode.OPEN_BTREE);
- }
-
- @Override
- public void initialize() throws HyracksDataException {
-
- IBTreeLeafFrame cursorFrame = btreeOpHelper.getOperatorDescriptor().getLeafFactory().getFrame();
- DiskOrderScanCursor cursor = new DiskOrderScanCursor(cursorFrame);
- IBTreeMetaDataFrame metaFrame = new MetaDataFrame();
-
- try {
-
- btreeOpHelper.init();
-
- try {
- btreeOpHelper.getBTree().diskOrderScan(cursor, cursorFrame, metaFrame);
-
- MultiComparator cmp = btreeOpHelper.getBTree().getMultiComparator();
- ByteBuffer frame = btreeOpHelper.getHyracksStageletContext().allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(btreeOpHelper.getHyracksStageletContext().getFrameSize());
- appender.reset(frame, true);
- ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
- DataOutput dos = tb.getDataOutput();
-
- while (cursor.hasNext()) {
- tb.reset();
- cursor.next();
-
- ITupleReference frameTuple = cursor.getTuple();
- for (int i = 0; i < frameTuple.getFieldCount(); i++) {
- dos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
- tb.addFieldEndOffset();
- }
-
- if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
- FrameUtils.flushFrame(frame, writer);
- appender.reset(frame, true);
- if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
- throw new IllegalStateException();
- }
- }
- }
-
- if (appender.getTupleCount() > 0) {
- FrameUtils.flushFrame(frame, writer);
- }
- }
- finally {
- cursor.close();
- writer.close();
- }
-
- } catch(Exception e) {
- deinitialize();
- throw new HyracksDataException(e);
- }
- }
-
- @Override
- public void deinitialize() throws HyracksDataException {
- btreeOpHelper.deinit();
- }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDropOperatorNodePushable.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDropOperatorNodePushable.java
deleted file mode 100644
index c087fdd..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDropOperatorNodePushable.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.dataflow;
-
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import edu.uci.ics.hyracks.api.comm.IFrameWriter;
-import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractOperatorNodePushable;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-
-public class BTreeDropOperatorNodePushable extends AbstractOperatorNodePushable {
- private static final Logger LOGGER = Logger.getLogger(BTreeDropOperatorNodePushable.class.getName());
-
- private final IHyracksStageletContext ctx;
- private IBTreeRegistryProvider btreeRegistryProvider;
- private IStorageManagerInterface storageManager;
- private IFileSplitProvider fileSplitProvider;
- private int partition;
-
- public BTreeDropOperatorNodePushable(IHyracksStageletContext ctx, IStorageManagerInterface storageManager,
- IBTreeRegistryProvider btreeRegistryProvider, IFileSplitProvider fileSplitProvider, int partition) {
- this.ctx = ctx;
- this.storageManager = storageManager;
- this.btreeRegistryProvider = btreeRegistryProvider;
- this.fileSplitProvider = fileSplitProvider;
- this.partition = partition;
- }
-
- @Override
- public void deinitialize() throws HyracksDataException {
- }
-
- @Override
- public int getInputArity() {
- return 0;
- }
-
- @Override
- public IFrameWriter getInputFrameWriter(int index) {
- return null;
- }
-
- @Override
- public void initialize() throws HyracksDataException {
- try {
-
- BTreeRegistry btreeRegistry = btreeRegistryProvider.getBTreeRegistry(ctx);
- IBufferCache bufferCache = storageManager.getBufferCache(ctx);
- IFileMapProvider fileMapProvider = storageManager.getFileMapProvider(ctx);
-
- FileReference f = fileSplitProvider.getFileSplits()[partition].getLocalFile();
-
- boolean fileIsMapped = fileMapProvider.isMapped(f);
- if (!fileIsMapped) {
- throw new HyracksDataException("Cannot drop B-Tree with name " + f.toString() + ". No file mapping exists.");
- }
-
- int btreeFileId = fileMapProvider.lookupFileId(f);
-
- // unregister btree instance
- btreeRegistry.lock();
- try {
- btreeRegistry.unregister(btreeFileId);
- } finally {
- btreeRegistry.unlock();
- }
-
- // remove name to id mapping
- bufferCache.deleteFile(btreeFileId);
- }
- // TODO: for the time being we don't throw,
- // with proper exception handling (no hanging job problem) we should throw
- catch (Exception e) {
- if (LOGGER.isLoggable(Level.WARNING)) {
- LOGGER.warning("BTRee Drop Operator Failed Due To Exception: " + e.getMessage());
- }
- }
- }
-
- @Override
- public void setOutputFrameWriter(int index, IFrameWriter writer, RecordDescriptor recordDesc) {
- }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeFileEnlistmentOperatorDescriptor.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeFileEnlistmentOperatorDescriptor.java
deleted file mode 100644
index c105274..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeFileEnlistmentOperatorDescriptor.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.dataflow;
-
-import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
-import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.job.IOperatorEnvironment;
-import edu.uci.ics.hyracks.api.job.JobSpecification;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-
-// re-create in-memory state for a btree that has already been built (i.e., the file exists):
-// 1. register files in file manager (FileManager)
-// 2. create file mappings (FileMappingProvider)
-// 3. register btree instance (BTreeRegistry)
-
-public class BTreeFileEnlistmentOperatorDescriptor extends AbstractBTreeOperatorDescriptor {
-
- private static final long serialVersionUID = 1L;
-
- public BTreeFileEnlistmentOperatorDescriptor(JobSpecification spec, RecordDescriptor recDesc,
- IStorageManagerInterface storageManager, IBTreeRegistryProvider btreeRegistryProvider,
- IFileSplitProvider fileSplitProvider, IBTreeInteriorFrameFactory interiorFactory,
- IBTreeLeafFrameFactory leafFactory, ITypeTrait[] typeTraits, IBinaryComparatorFactory[] comparatorFactories) {
- super(spec, 0, 0, recDesc, storageManager, btreeRegistryProvider, fileSplitProvider, interiorFactory,
- leafFactory, typeTraits, comparatorFactories);
- }
-
- @Override
- public IOperatorNodePushable createPushRuntime(IHyracksStageletContext ctx, IOperatorEnvironment env,
- IRecordDescriptorProvider recordDescProvider, int partition, int partitions) throws HyracksDataException {
- return new BTreeFileEnlistmentOperatorNodePushable(this, ctx, partition);
- }
-
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeInsertUpdateDeleteOperatorNodePushable.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeInsertUpdateDeleteOperatorNodePushable.java
deleted file mode 100644
index acb7d0f..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeInsertUpdateDeleteOperatorNodePushable.java
+++ /dev/null
@@ -1,117 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package edu.uci.ics.hyracks.storage.am.btree.dataflow;
-
-import java.nio.ByteBuffer;
-
-import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
-import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
-import edu.uci.ics.hyracks.storage.am.btree.frames.MetaDataFrame;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOp;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext;
-
-public class BTreeInsertUpdateDeleteOperatorNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
- private final BTreeOpHelper btreeOpHelper;
- private FrameTupleAccessor accessor;
- private final IRecordDescriptorProvider recordDescProvider;
- private final BTreeOp op;
- private final PermutingFrameTupleReference tuple = new PermutingFrameTupleReference();
- private ByteBuffer writeBuffer;
- private BTreeOpContext opCtx;
-
- public BTreeInsertUpdateDeleteOperatorNodePushable(AbstractBTreeOperatorDescriptor opDesc,
- IHyracksStageletContext ctx, int partition, int[] fieldPermutation,
- IRecordDescriptorProvider recordDescProvider, BTreeOp op) {
- btreeOpHelper = new BTreeOpHelper(opDesc, ctx, partition, BTreeOpHelper.BTreeMode.OPEN_BTREE);
- this.recordDescProvider = recordDescProvider;
- this.op = op;
- tuple.setFieldPermutation(fieldPermutation);
- }
-
- @Override
- public void open() throws HyracksDataException {
- AbstractBTreeOperatorDescriptor opDesc = btreeOpHelper.getOperatorDescriptor();
- RecordDescriptor inputRecDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getOperatorId(), 0);
- accessor = new FrameTupleAccessor(btreeOpHelper.getHyracksStageletContext().getFrameSize(), inputRecDesc);
- writeBuffer = btreeOpHelper.getHyracksStageletContext().allocateFrame();
- try {
- btreeOpHelper.init();
- btreeOpHelper.getBTree().open(btreeOpHelper.getBTreeFileId());
- opCtx = btreeOpHelper.getBTree().createOpContext(op, btreeOpHelper.getLeafFrame(),
- btreeOpHelper.getInteriorFrame(), new MetaDataFrame());
- } catch(Exception e) {
- // cleanup in case of failure
- btreeOpHelper.deinit();
- throw new HyracksDataException(e);
- }
- }
-
- @Override
- public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
- final BTree btree = btreeOpHelper.getBTree();
- accessor.reset(buffer);
-
- int tupleCount = accessor.getTupleCount();
- for (int i = 0; i < tupleCount; i++) {
- tuple.reset(accessor, i);
- try {
- switch (op) {
-
- case BTO_INSERT: {
- btree.insert(tuple, opCtx);
- }
- break;
-
- case BTO_DELETE: {
- btree.delete(tuple, opCtx);
- }
- break;
-
- default: {
- throw new HyracksDataException("Unsupported operation " + op
- + " in BTree InsertUpdateDelete operator");
- }
-
- }
-
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
-
- // pass a copy of the frame to next op
- System.arraycopy(buffer.array(), 0, writeBuffer.array(), 0, buffer.capacity());
- FrameUtils.flushFrame(writeBuffer, writer);
- }
-
- @Override
- public void close() throws HyracksDataException {
- try {
- writer.close();
- } finally {
- btreeOpHelper.deinit();
- }
- }
-
- @Override
- public void flush() throws HyracksDataException {
- }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeOpHelper.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeOpHelper.java
index 880cc25..c85c6a0 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeOpHelper.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeOpHelper.java
@@ -1,161 +1,32 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
package edu.uci.ics.hyracks.storage.am.btree.dataflow;
import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.frames.MetaDataFrame;
import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.impls.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOperatorDescriptorHelper;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexHelperOpenMode;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexOpHelper;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-final class BTreeOpHelper {
+public class BTreeOpHelper extends TreeIndexOpHelper {
- public enum BTreeMode {
- OPEN_BTREE, CREATE_BTREE, ENLIST_BTREE
- }
-
- private IBTreeInteriorFrame interiorFrame;
- private IBTreeLeafFrame leafFrame;
-
- private BTree btree;
- private int btreeFileId = -1;
- private int partition;
-
- private AbstractBTreeOperatorDescriptor opDesc;
- private IHyracksStageletContext ctx;
-
- private BTreeMode mode;
-
- BTreeOpHelper(AbstractBTreeOperatorDescriptor opDesc, final IHyracksStageletContext ctx, int partition,
- BTreeMode mode) {
- this.opDesc = opDesc;
- this.ctx = ctx;
- this.mode = mode;
- this.partition = partition;
+ public BTreeOpHelper(ITreeIndexOperatorDescriptorHelper opDesc, IHyracksStageletContext ctx, int partition,
+ IndexHelperOpenMode mode) {
+ super(opDesc, ctx, partition, mode);
}
-
- void init() throws HyracksDataException {
+
+ public ITreeIndex createTreeIndex() throws HyracksDataException {
IBufferCache bufferCache = opDesc.getStorageManager().getBufferCache(ctx);
- IFileMapProvider fileMapProvider = opDesc.getStorageManager().getFileMapProvider(ctx);
- IFileSplitProvider fileSplitProvider = opDesc.getFileSplitProvider();
-
- FileReference f = fileSplitProvider.getFileSplits()[partition].getLocalFile();
- boolean fileIsMapped = fileMapProvider.isMapped(f);
-
- switch (mode) {
-
- case OPEN_BTREE: {
- if (!fileIsMapped) {
- throw new HyracksDataException(
- "Trying to open btree from unmapped file " + f.toString());
- }
- }
- break;
-
- case CREATE_BTREE:
- case ENLIST_BTREE: {
- if (!fileIsMapped) {
- bufferCache.createFile(f);
- }
- }
- break;
-
- }
-
- btreeFileId = fileMapProvider.lookupFileId(f);
- bufferCache.openFile(btreeFileId);
-
- interiorFrame = opDesc.getInteriorFactory().getFrame();
- leafFrame = opDesc.getLeafFactory().getFrame();
-
- BTreeRegistry btreeRegistry = opDesc.getBtreeRegistryProvider().getBTreeRegistry(ctx);
- btree = btreeRegistry.get(btreeFileId);
- if (btree == null) {
-
- // create new btree and register it
- btreeRegistry.lock();
- try {
- // check if btree has already been registered by another thread
- btree = btreeRegistry.get(btreeFileId);
- if (btree == null) {
- // this thread should create and register the btree
-
- IBinaryComparator[] comparators = new IBinaryComparator[opDesc
- .getComparatorFactories().length];
- for (int i = 0; i < opDesc.getComparatorFactories().length; i++) {
- comparators[i] = opDesc.getComparatorFactories()[i]
- .createBinaryComparator();
- }
-
- MultiComparator cmp = new MultiComparator(opDesc
- .getTypeTraits(), comparators);
-
- btree = new BTree(bufferCache, opDesc.getInteriorFactory(),
- opDesc.getLeafFactory(), cmp);
- if (mode == BTreeMode.CREATE_BTREE) {
- MetaDataFrame metaFrame = new MetaDataFrame();
- try {
- btree.create(btreeFileId, leafFrame, metaFrame);
- btree.open(btreeFileId);
- } catch (Exception e) {
- throw new HyracksDataException(e);
- }
- }
- btreeRegistry.register(btreeFileId, btree);
- }
- } finally {
- btreeRegistry.unlock();
- }
- }
- }
-
- public void deinit() throws HyracksDataException {
- if (btreeFileId != -1) {
- IBufferCache bufferCache = opDesc.getStorageManager().getBufferCache(ctx);
- bufferCache.closeFile(btreeFileId);
- }
+ ITreeIndexMetaDataFrameFactory metaDataFrameFactory = new LIFOMetaDataFrameFactory();
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, indexFileId, 0, metaDataFrameFactory);
+ return new BTree(bufferCache, freePageManager, opDesc.getTreeIndexInteriorFactory(),
+ opDesc.getTreeIndexLeafFactory(), cmp);
}
-
- public BTree getBTree() {
- return btree;
- }
-
- public IHyracksStageletContext getHyracksStageletContext() {
- return ctx;
- }
-
- public AbstractBTreeOperatorDescriptor getOperatorDescriptor() {
- return opDesc;
- }
-
- public IBTreeLeafFrame getLeafFrame() {
- return leafFrame;
- }
-
- public IBTreeInteriorFrame getInteriorFrame() {
- return interiorFrame;
- }
-
- public int getBTreeFileId() {
- return btreeFileId;
- }
-}
\ No newline at end of file
+
+
+}
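The reworked BTreeOpHelper inherits its file and registry lifecycle from TreeIndexOpHelper and contributes only createTreeIndex(). A minimal usage sketch, assuming the inherited init()/getTreeIndex()/deinit() methods exercised by the node pushables later in this patch, with opDesc, ctx and partition as placeholders and exception handling elided; the exact point where createTreeIndex() is invoked lives in TreeIndexOpHelper, which is not part of this hunk:

    TreeIndexOpHelper opHelper = new BTreeOpHelper(opDesc, ctx, partition, IndexHelperOpenMode.OPEN);
    try {
        opHelper.init();                               // opens the backing file; the helper resolves the index
        BTree btree = (BTree) opHelper.getTreeIndex();  // B-tree instance built by createTreeIndex() above
        // ... run operations against the B-tree ...
    } finally {
        opHelper.deinit();                             // closes the backing file
    }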
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeOpHelperFactory.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeOpHelperFactory.java
new file mode 100644
index 0000000..7802b81
--- /dev/null
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeOpHelperFactory.java
@@ -0,0 +1,24 @@
+package edu.uci.ics.hyracks.storage.am.btree.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOpHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOperatorDescriptorHelper;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexHelperOpenMode;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexOpHelper;
+import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
+
+public class BTreeOpHelperFactory implements ITreeIndexOpHelperFactory {
+
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public TreeIndexOpHelper createTreeIndexOpHelper(ITreeIndexOperatorDescriptorHelper opDesc,
+ IHyracksStageletContext ctx, int partition, IndexHelperOpenMode mode) {
+ return new BTreeOpHelper(opDesc, ctx, partition, mode);
+ }
+
+}
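BTreeOpHelperFactory is the serializable hook that lets the generic tree-index operators build a B-tree-specific helper at runtime instead of hard-coding one index type. A sketch of the call site, with opDesc, ctx and partition as placeholders:

    ITreeIndexOpHelperFactory opHelperFactory = new BTreeOpHelperFactory();
    TreeIndexOpHelper helper = opHelperFactory.createTreeIndexOpHelper(opDesc, ctx, partition,
            IndexHelperOpenMode.OPEN);

Another tree index can plug into the same operators by supplying its own ITreeIndexOpHelperFactory implementation of the same shape.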
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorDescriptor.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorDescriptor.java
index 21f37a3..254935b 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorDescriptor.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorDescriptor.java
@@ -24,11 +24,14 @@
import edu.uci.ics.hyracks.api.job.IOperatorEnvironment;
import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOpHelperFactory;
import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-public class BTreeSearchOperatorDescriptor extends AbstractBTreeOperatorDescriptor {
+public class BTreeSearchOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
private static final long serialVersionUID = 1L;
@@ -40,13 +43,13 @@
private boolean highKeyInclusive;
public BTreeSearchOperatorDescriptor(JobSpecification spec, RecordDescriptor recDesc,
- IStorageManagerInterface storageManager, IBTreeRegistryProvider btreeRegistryProvider,
- IFileSplitProvider fileSplitProvider, IBTreeInteriorFrameFactory interiorFactory,
- IBTreeLeafFrameFactory leafFactory, ITypeTrait[] typeTraits,
+ IStorageManagerInterface storageManager, IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider,
+ IFileSplitProvider fileSplitProvider, ITreeIndexFrameFactory interiorFrameFactory,
+ ITreeIndexFrameFactory leafFrameFactory, ITypeTrait[] typeTraits,
IBinaryComparatorFactory[] comparatorFactories, boolean isForward, int[] lowKeyFields, int[] highKeyFields,
- boolean lowKeyInclusive, boolean highKeyInclusive) {
- super(spec, 1, 1, recDesc, storageManager, btreeRegistryProvider, fileSplitProvider, interiorFactory,
- leafFactory, typeTraits, comparatorFactories);
+ boolean lowKeyInclusive, boolean highKeyInclusive, ITreeIndexOpHelperFactory opHelperFactory) {
+ super(spec, 1, 1, recDesc, storageManager, treeIndexRegistryProvider, fileSplitProvider, interiorFrameFactory,
+ leafFrameFactory, typeTraits, comparatorFactories, opHelperFactory);
this.isForward = isForward;
this.lowKeyFields = lowKeyFields;
this.highKeyFields = highKeyFields;
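With this change the search descriptor takes the generic frame factories, an IIndexRegistryProvider<ITreeIndex>, and a trailing ITreeIndexOpHelperFactory. A hypothetical construction, where every non-literal argument is a placeholder supplied by the surrounding job:

    BTreeSearchOperatorDescriptor btreeSearchOp = new BTreeSearchOperatorDescriptor(
            spec, recDesc, storageManager, treeIndexRegistryProvider, fileSplitProvider,
            interiorFrameFactory, leafFrameFactory, typeTraits, comparatorFactories,
            true /* isForward */, lowKeyFields, highKeyFields,
            true /* lowKeyInclusive */, true /* highKeyInclusive */,
            new BTreeOpHelperFactory());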
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorNodePushable.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorNodePushable.java
index 415f169..9d33bb2 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorNodePushable.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeSearchOperatorNodePushable.java
@@ -28,203 +28,187 @@
import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeCursor;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOp;
import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext;
-import edu.uci.ics.hyracks.storage.am.btree.impls.MultiComparator;
import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexHelperOpenMode;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.PermutingFrameTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexOpHelper;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-public class BTreeSearchOperatorNodePushable extends
- AbstractUnaryInputUnaryOutputOperatorNodePushable {
- private BTreeOpHelper btreeOpHelper;
- private FrameTupleAccessor accessor;
+public class BTreeSearchOperatorNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
+ private TreeIndexOpHelper treeIndexOpHelper;
+ private FrameTupleAccessor accessor;
- private ByteBuffer writeBuffer;
- private FrameTupleAppender appender;
- private ArrayTupleBuilder tb;
- private DataOutput dos;
+ private ByteBuffer writeBuffer;
+ private FrameTupleAppender appender;
+ private ArrayTupleBuilder tb;
+ private DataOutput dos;
- private BTree btree;
- private boolean isForward;
- private PermutingFrameTupleReference lowKey;
- private PermutingFrameTupleReference highKey;
- private boolean lowKeyInclusive;
- private boolean highKeyInclusive;
- private RangePredicate rangePred;
- private MultiComparator lowKeySearchCmp;
- private MultiComparator highKeySearchCmp;
- private IBTreeCursor cursor;
- private IBTreeLeafFrame cursorFrame;
- private BTreeOpContext opCtx;
+ private BTree btree;
+ private boolean isForward;
+ private PermutingFrameTupleReference lowKey;
+ private PermutingFrameTupleReference highKey;
+ private boolean lowKeyInclusive;
+ private boolean highKeyInclusive;
+ private RangePredicate rangePred;
+ private MultiComparator lowKeySearchCmp;
+ private MultiComparator highKeySearchCmp;
+ private ITreeIndexCursor cursor;
+ private ITreeIndexFrame cursorFrame;
+ private BTreeOpContext opCtx;
- private RecordDescriptor recDesc;
+ private RecordDescriptor recDesc;
- public BTreeSearchOperatorNodePushable(
- AbstractBTreeOperatorDescriptor opDesc,
- IHyracksStageletContext ctx, int partition,
- IRecordDescriptorProvider recordDescProvider, boolean isForward,
- int[] lowKeyFields, int[] highKeyFields, boolean lowKeyInclusive,
- boolean highKeyInclusive) {
- btreeOpHelper = new BTreeOpHelper(opDesc, ctx, partition,
- BTreeOpHelper.BTreeMode.OPEN_BTREE);
- this.isForward = isForward;
- this.lowKeyInclusive = lowKeyInclusive;
- this.highKeyInclusive = highKeyInclusive;
- this.recDesc = recordDescProvider.getInputRecordDescriptor(opDesc
- .getOperatorId(), 0);
- if (lowKeyFields != null && lowKeyFields.length > 0) {
- lowKey = new PermutingFrameTupleReference();
- lowKey.setFieldPermutation(lowKeyFields);
- }
- if (highKeyFields != null && highKeyFields.length > 0) {
- highKey = new PermutingFrameTupleReference();
- highKey.setFieldPermutation(highKeyFields);
- }
- }
+ public BTreeSearchOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc, IHyracksStageletContext ctx,
+ int partition, IRecordDescriptorProvider recordDescProvider, boolean isForward, int[] lowKeyFields,
+ int[] highKeyFields, boolean lowKeyInclusive, boolean highKeyInclusive) {
+ treeIndexOpHelper = opDesc.getTreeIndexOpHelperFactory().createTreeIndexOpHelper(opDesc, ctx, partition, IndexHelperOpenMode.OPEN);
+ this.isForward = isForward;
+ this.lowKeyInclusive = lowKeyInclusive;
+ this.highKeyInclusive = highKeyInclusive;
+ this.recDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getOperatorId(), 0);
+ if (lowKeyFields != null && lowKeyFields.length > 0) {
+ lowKey = new PermutingFrameTupleReference();
+ lowKey.setFieldPermutation(lowKeyFields);
+ }
+ if (highKeyFields != null && highKeyFields.length > 0) {
+ highKey = new PermutingFrameTupleReference();
+ highKey.setFieldPermutation(highKeyFields);
+ }
+ }
- @Override
- public void open() throws HyracksDataException {
- AbstractBTreeOperatorDescriptor opDesc = btreeOpHelper
- .getOperatorDescriptor();
- accessor = new FrameTupleAccessor(btreeOpHelper
- .getHyracksStageletContext().getFrameSize(), recDesc);
+ @Override
+ public void open() throws HyracksDataException {
+ AbstractTreeIndexOperatorDescriptor opDesc = (AbstractTreeIndexOperatorDescriptor) treeIndexOpHelper
+ .getOperatorDescriptor();
+ accessor = new FrameTupleAccessor(treeIndexOpHelper.getHyracksStageletContext().getFrameSize(), recDesc);
- cursorFrame = opDesc.getLeafFactory().getFrame();
- cursor = new RangeSearchCursor(cursorFrame);
+ cursorFrame = opDesc.getTreeIndexLeafFactory().createFrame();
+ cursor = new BTreeRangeSearchCursor((IBTreeLeafFrame)cursorFrame);
- try {
+ try {
- btreeOpHelper.init();
- btree = btreeOpHelper.getBTree();
+ treeIndexOpHelper.init();
+ btree = (BTree)treeIndexOpHelper.getTreeIndex();
- // construct range predicate
+ // construct range predicate
- int lowKeySearchFields = btree.getMultiComparator()
- .getComparators().length;
- int highKeySearchFields = btree.getMultiComparator()
- .getComparators().length;
- if (lowKey != null)
- lowKeySearchFields = lowKey.getFieldCount();
- if (highKey != null)
- highKeySearchFields = highKey.getFieldCount();
+ int lowKeySearchFields = btree.getMultiComparator().getComparators().length;
+ int highKeySearchFields = btree.getMultiComparator().getComparators().length;
+ if (lowKey != null)
+ lowKeySearchFields = lowKey.getFieldCount();
+ if (highKey != null)
+ highKeySearchFields = highKey.getFieldCount();
- IBinaryComparator[] lowKeySearchComparators = new IBinaryComparator[lowKeySearchFields];
- for (int i = 0; i < lowKeySearchFields; i++) {
- lowKeySearchComparators[i] = btree.getMultiComparator()
- .getComparators()[i];
- }
- lowKeySearchCmp = new MultiComparator(btree.getMultiComparator()
- .getTypeTraits(), lowKeySearchComparators);
+ IBinaryComparator[] lowKeySearchComparators = new IBinaryComparator[lowKeySearchFields];
+ for (int i = 0; i < lowKeySearchFields; i++) {
+ lowKeySearchComparators[i] = btree.getMultiComparator().getComparators()[i];
+ }
+ lowKeySearchCmp = new MultiComparator(btree.getMultiComparator().getTypeTraits(), lowKeySearchComparators);
- if (lowKeySearchFields == highKeySearchFields) {
- highKeySearchCmp = lowKeySearchCmp;
- } else {
- IBinaryComparator[] highKeySearchComparators = new IBinaryComparator[highKeySearchFields];
- for (int i = 0; i < highKeySearchFields; i++) {
- highKeySearchComparators[i] = btree.getMultiComparator()
- .getComparators()[i];
- }
- highKeySearchCmp = new MultiComparator(btree
- .getMultiComparator().getTypeTraits(),
- highKeySearchComparators);
+ if (lowKeySearchFields == highKeySearchFields) {
+ highKeySearchCmp = lowKeySearchCmp;
+ } else {
+ IBinaryComparator[] highKeySearchComparators = new IBinaryComparator[highKeySearchFields];
+ for (int i = 0; i < highKeySearchFields; i++) {
+ highKeySearchComparators[i] = btree.getMultiComparator().getComparators()[i];
+ }
+ highKeySearchCmp = new MultiComparator(btree.getMultiComparator().getTypeTraits(),
+ highKeySearchComparators);
- }
+ }
- rangePred = new RangePredicate(isForward, null, null,
- lowKeyInclusive, highKeyInclusive, lowKeySearchCmp,
- highKeySearchCmp);
+ rangePred = new RangePredicate(isForward, null, null, lowKeyInclusive, highKeyInclusive, lowKeySearchCmp,
+ highKeySearchCmp);
- accessor = new FrameTupleAccessor(btreeOpHelper
- .getHyracksStageletContext().getFrameSize(), recDesc);
+ accessor = new FrameTupleAccessor(treeIndexOpHelper.getHyracksStageletContext().getFrameSize(), recDesc);
- writeBuffer = btreeOpHelper.getHyracksStageletContext()
- .allocateFrame();
- tb = new ArrayTupleBuilder(btree.getMultiComparator()
- .getFieldCount());
- dos = tb.getDataOutput();
- appender = new FrameTupleAppender(btreeOpHelper
- .getHyracksStageletContext().getFrameSize());
- appender.reset(writeBuffer, true);
+ writeBuffer = treeIndexOpHelper.getHyracksStageletContext().allocateFrame();
+ tb = new ArrayTupleBuilder(btree.getMultiComparator().getFieldCount());
+ dos = tb.getDataOutput();
+ appender = new FrameTupleAppender(treeIndexOpHelper.getHyracksStageletContext().getFrameSize());
+ appender.reset(writeBuffer, true);
- opCtx = btree.createOpContext(BTreeOp.BTO_SEARCH, btreeOpHelper
- .getLeafFrame(), btreeOpHelper.getInteriorFrame(), null);
+ opCtx = btree.createOpContext(IndexOp.SEARCH, treeIndexOpHelper.getLeafFrame(), treeIndexOpHelper
+ .getInteriorFrame(), null);
- } catch (Exception e) {
- btreeOpHelper.deinit();
- }
- }
+ } catch (Exception e) {
+ treeIndexOpHelper.deinit();
+ }
+ }
- private void writeSearchResults() throws Exception {
- while (cursor.hasNext()) {
- tb.reset();
- cursor.next();
+ private void writeSearchResults() throws Exception {
+ while (cursor.hasNext()) {
+ tb.reset();
+ cursor.next();
- ITupleReference frameTuple = cursor.getTuple();
- for (int i = 0; i < frameTuple.getFieldCount(); i++) {
- dos.write(frameTuple.getFieldData(i), frameTuple
- .getFieldStart(i), frameTuple.getFieldLength(i));
- tb.addFieldEndOffset();
- }
+ ITupleReference frameTuple = cursor.getTuple();
+ for (int i = 0; i < frameTuple.getFieldCount(); i++) {
+ dos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
+ tb.addFieldEndOffset();
+ }
- if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0,
- tb.getSize())) {
- FrameUtils.flushFrame(writeBuffer, writer);
- appender.reset(writeBuffer, true);
- if (!appender.append(tb.getFieldEndOffsets(),
- tb.getByteArray(), 0, tb.getSize())) {
- throw new IllegalStateException();
- }
- }
- }
- }
+ if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+ FrameUtils.flushFrame(writeBuffer, writer);
+ appender.reset(writeBuffer, true);
+ if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+ throw new IllegalStateException();
+ }
+ }
+ }
+ }
- @Override
- public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
- accessor.reset(buffer);
+ @Override
+ public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+ accessor.reset(buffer);
- int tupleCount = accessor.getTupleCount();
- try {
- for (int i = 0; i < tupleCount; i++) {
- if (lowKey != null)
- lowKey.reset(accessor, i);
- if (highKey != null)
- highKey.reset(accessor, i);
- rangePred.setLowKey(lowKey, lowKeyInclusive);
- rangePred.setHighKey(highKey, highKeyInclusive);
+ int tupleCount = accessor.getTupleCount();
+ try {
+ for (int i = 0; i < tupleCount; i++) {
+ if (lowKey != null)
+ lowKey.reset(accessor, i);
+ if (highKey != null)
+ highKey.reset(accessor, i);
+ rangePred.setLowKey(lowKey, lowKeyInclusive);
+ rangePred.setHighKey(highKey, highKeyInclusive);
- cursor.reset();
- btree.search(cursor, rangePred, opCtx);
- writeSearchResults();
- }
- } catch (Exception e) {
- throw new HyracksDataException(e);
- }
- }
+ cursor.reset();
+ btree.search(cursor, rangePred, opCtx);
+ writeSearchResults();
+ }
+ } catch (Exception e) {
+ throw new HyracksDataException(e);
+ }
+ }
- @Override
- public void close() throws HyracksDataException {
- try {
- if (appender.getTupleCount() > 0) {
- FrameUtils.flushFrame(writeBuffer, writer);
- }
- writer.close();
- try {
- cursor.close();
- } catch (Exception e) {
- throw new HyracksDataException(e);
- }
- } finally {
- btreeOpHelper.deinit();
- }
- }
+ @Override
+ public void close() throws HyracksDataException {
+ try {
+ if (appender.getTupleCount() > 0) {
+ FrameUtils.flushFrame(writeBuffer, writer);
+ }
+ writer.close();
+ try {
+ cursor.close();
+ } catch (Exception e) {
+ throw new HyracksDataException(e);
+ }
+ } finally {
+ treeIndexOpHelper.deinit();
+ }
+ }
- @Override
- public void flush() throws HyracksDataException {
- if (appender.getTupleCount() > 0) {
- FrameUtils.flushFrame(writeBuffer, writer);
- }
- }
+ @Override
+ public void flush() throws HyracksDataException {
+ if (appender.getTupleCount() > 0) {
+ FrameUtils.flushFrame(writeBuffer, writer);
+ }
+ }
}
\ No newline at end of file
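Condensed, the per-tuple search flow in the rewritten pushable boils down to the sequence below. The sketch assumes the imports added in this hunk, placeholder keys and comparators, frames obtained from the op helper, and elides the exception handling that the operator wraps around these calls:

    ITreeIndexFrame leafFrame = opDesc.getTreeIndexLeafFactory().createFrame();
    ITreeIndexCursor cursor = new BTreeRangeSearchCursor((IBTreeLeafFrame) leafFrame);
    RangePredicate rangePred = new RangePredicate(true, null, null, true, true,
            lowKeySearchCmp, highKeySearchCmp);
    BTreeOpContext opCtx = btree.createOpContext(IndexOp.SEARCH,
            treeIndexOpHelper.getLeafFrame(), treeIndexOpHelper.getInteriorFrame(), null);

    rangePred.setLowKey(lowKey, true);
    rangePred.setHighKey(highKey, true);
    cursor.reset();
    btree.search(cursor, rangePred, opCtx);
    while (cursor.hasNext()) {
        cursor.next();
        ITupleReference frameTuple = cursor.getTuple();
        // copy frameTuple's fields to the output frame, as writeSearchResults() does
    }
    cursor.close();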
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/FieldPrefixNSMLeafFrame.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrame.java
similarity index 89%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/FieldPrefixNSMLeafFrame.java
rename to hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrame.java
index 6eeb5e1..bd4947c 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/FieldPrefixNSMLeafFrame.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrame.java
@@ -26,30 +26,30 @@
import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeFrame;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleWriter;
import edu.uci.ics.hyracks.storage.am.btree.api.IFrameCompressor;
import edu.uci.ics.hyracks.storage.am.btree.api.IPrefixSlotManager;
-import edu.uci.ics.hyracks.storage.am.btree.api.ISlotManager;
import edu.uci.ics.hyracks.storage.am.btree.compressors.FieldPrefixCompressor;
import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeException;
import edu.uci.ics.hyracks.storage.am.btree.impls.FieldPrefixPrefixTupleReference;
import edu.uci.ics.hyracks.storage.am.btree.impls.FieldPrefixSlotManager;
import edu.uci.ics.hyracks.storage.am.btree.impls.FieldPrefixTupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.impls.FindTupleMode;
-import edu.uci.ics.hyracks.storage.am.btree.impls.FindTupleNoExactMatchPolicy;
-import edu.uci.ics.hyracks.storage.am.btree.impls.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.btree.impls.SlotOffTupleOff;
-import edu.uci.ics.hyracks.storage.am.btree.impls.SpaceStatus;
-import edu.uci.ics.hyracks.storage.am.btree.impls.SplitKey;
-import edu.uci.ics.hyracks.storage.am.btree.tuples.TypeAwareTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.api.ISlotManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.SlotOffTupleOff;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriter;
import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
// WARNING: only works when tupleWriter is an instance of TypeAwareTupleWriter
-public class FieldPrefixNSMLeafFrame implements IBTreeLeafFrame {
+public class BTreeFieldPrefixNSMLeafFrame implements IBTreeLeafFrame {
protected static final int pageLsnOff = 0; // 0
protected static final int tupleCountOff = pageLsnOff + 4; // 4
@@ -69,12 +69,12 @@
public IPrefixSlotManager slotManager; // TODO: should be protected, but
// will trigger some refactoring
- private IBTreeTupleWriter tupleWriter;
+ private ITreeIndexTupleWriter tupleWriter;
private FieldPrefixTupleReference frameTuple;
private FieldPrefixPrefixTupleReference framePrefixTuple;
- public FieldPrefixNSMLeafFrame(IBTreeTupleWriter tupleWriter) {
+ public BTreeFieldPrefixNSMLeafFrame(ITreeIndexTupleWriter tupleWriter) {
this.tupleWriter = tupleWriter;
this.frameTuple = new FieldPrefixTupleReference(tupleWriter.createTupleReference());
ITypeTrait[] typeTraits = ((TypeAwareTupleWriter) tupleWriter).getTypeTraits();
@@ -115,7 +115,7 @@
// 3. prefix tuples are sorted (last prefix tuple is at highest offset)

// this procedure will not move prefix tuples
@Override
- public void compact(MultiComparator cmp) {
+ public boolean compact(MultiComparator cmp) {
resetSpaceParams();
frameTuple.setFieldCount(cmp.getFieldCount());
@@ -168,11 +168,13 @@
slotManager.setSlot(sortedTupleOffs.get(i).slotOff, slotManager.encodeSlotFields(prefixSlotNum, freeSpace));
freeSpace += tupleLength;
}
-
+
buf.putInt(freeSpaceOff, freeSpace);
int totalFreeSpace = buf.capacity() - buf.getInt(freeSpaceOff)
- ((buf.getInt(tupleCountOff) + buf.getInt(prefixTupleCountOff)) * slotManager.getSlotSize());
buf.putInt(totalFreeSpaceOff, totalFreeSpace);
+
+ return false;
}
@Override
@@ -225,7 +227,7 @@
}
@Override
- public SpaceStatus hasSpaceInsert(ITupleReference tuple, MultiComparator cmp) {
+ public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple, MultiComparator cmp) {
int freeContiguous = buf.capacity() - buf.getInt(freeSpaceOff)
- ((buf.getInt(tupleCountOff) + buf.getInt(prefixTupleCountOff)) * slotManager.getSlotSize());
@@ -233,11 +235,11 @@
// see if the tuple would fit uncompressed
if (bytesRequired + slotManager.getSlotSize() <= freeContiguous)
- return SpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
+ return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
// see if tuple would fit into remaining space after compaction
if (bytesRequired + slotManager.getSlotSize() <= buf.getInt(totalFreeSpaceOff))
- return SpaceStatus.SUFFICIENT_SPACE;
+ return FrameOpSpaceStatus.SUFFICIENT_SPACE;
// see if the tuple matches a prefix and will fit after truncating the
// prefix
@@ -250,16 +252,16 @@
int compressedSize = tupleWriter.bytesRequired(tuple, numPrefixFields, tuple.getFieldCount()
- numPrefixFields);
if (compressedSize + slotManager.getSlotSize() <= freeContiguous)
- return SpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
+ return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
}
- return SpaceStatus.INSUFFICIENT_SPACE;
+ return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
}
@Override
- public SpaceStatus hasSpaceUpdate(int rid, ITupleReference tuple, MultiComparator cmp) {
+ public FrameOpSpaceStatus hasSpaceUpdate(int rid, ITupleReference tuple, MultiComparator cmp) {
// TODO Auto-generated method stub
- return SpaceStatus.INSUFFICIENT_SPACE;
+ return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
}
protected void resetSpaceParams() {
@@ -290,12 +292,14 @@
}
@Override
- public void insert(ITupleReference tuple, MultiComparator cmp) throws Exception {
- int slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.FTM_INCLUSIVE,
- FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
-
- slot = slotManager.insertSlot(slot, buf.getInt(freeSpaceOff));
-
+ public int findTupleIndex(ITupleReference tuple, MultiComparator cmp) throws Exception {
+ return slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.FTM_INCLUSIVE,
+ FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
+ }
+
+ @Override
+ public void insert(ITupleReference tuple, MultiComparator cmp, int tupleIndex) throws Exception {
+ int slot = slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
int prefixSlotNum = slotManager.decodeFirstSlotField(slot);
int numPrefixFields = 0;
if (prefixSlotNum != FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
@@ -382,6 +386,11 @@
public boolean isLeaf() {
return buf.get(levelOff) == 0;
}
+
+ @Override
+ public boolean isInterior() {
+ return buf.get(levelOff) > 0;
+ }
@Override
public byte getLevel() {
@@ -446,31 +455,20 @@
}
@Override
- public int split(IBTreeFrame rightFrame, ITupleReference tuple, MultiComparator cmp, SplitKey splitKey)
+ public int split(ITreeIndexFrame rightFrame, ITupleReference tuple, MultiComparator cmp, ISplitKey splitKey)
throws Exception {
- FieldPrefixNSMLeafFrame rf = (FieldPrefixNSMLeafFrame) rightFrame;
+ BTreeFieldPrefixNSMLeafFrame rf = (BTreeFieldPrefixNSMLeafFrame) rightFrame;
frameTuple.setFieldCount(cmp.getFieldCount());
-
- // before doing anything check if key already exists
- int slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.FTM_EXACT,
- FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
- int tupleSlotNum = slotManager.decodeSecondSlotField(slot);
- if (tupleSlotNum != FieldPrefixSlotManager.GREATEST_SLOT) {
- frameTuple.resetByTupleIndex(this, tupleSlotNum);
- if (cmp.compare(tuple, frameTuple) == 0) {
- throw new BTreeException("Inserting duplicate key into unique index");
- }
- }
-
+
ByteBuffer right = rf.getBuffer();
int tupleCount = getTupleCount();
int prefixTupleCount = getPrefixTupleCount();
int tuplesToLeft;
int midSlotNum = tupleCount / 2;
- IBTreeFrame targetFrame = null;
+ ITreeIndexFrame targetFrame = null;
frameTuple.resetByTupleIndex(this, midSlotNum);
int comparison = cmp.compare(tuple, frameTuple);
if (comparison >= 0) {
@@ -573,7 +571,8 @@
rightFrame.compact(cmp);
// insert last key
- targetFrame.insert(tuple, cmp);
+ int targetTupleIndex = targetFrame.findTupleIndex(tuple, cmp);
+ targetFrame.insert(tuple, cmp, targetTupleIndex);
// set split key to be highest value in left page
frameTuple.resetByTupleIndex(this, getTupleCount() - 1);
@@ -581,7 +580,7 @@
int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
splitKey.initData(splitKeySize);
tupleWriter.writeTupleFields(frameTuple, 0, cmp.getKeyFieldCount(), splitKey.getBuffer(), 0);
- splitKey.getTuple().resetByOffset(splitKey.getBuffer(), 0);
+ splitKey.getTuple().resetByTupleOffset(splitKey.getBuffer(), 0);
return 0;
}
@@ -638,17 +637,17 @@
frameTuple.setFieldCount(fieldCount);
}
- public IBTreeTupleWriter getTupleWriter() {
+ public ITreeIndexTupleWriter getTupleWriter() {
return tupleWriter;
}
@Override
- public IBTreeTupleReference createTupleReference() {
+ public ITreeIndexTupleReference createTupleReference() {
return new FieldPrefixTupleReference(tupleWriter.createTupleReference());
}
@Override
- public int findTupleIndex(ITupleReference searchKey, IBTreeTupleReference pageTuple, MultiComparator cmp,
+ public int findTupleIndex(ITupleReference searchKey, ITreeIndexTupleReference pageTuple, MultiComparator cmp,
FindTupleMode ftm, FindTupleNoExactMatchPolicy ftp) {
int slot = slotManager.findSlot(searchKey, pageTuple, framePrefixTuple, cmp, ftm, ftp);
int tupleIndex = slotManager.decodeSecondSlotField(slot);
@@ -658,4 +657,8 @@
return tupleIndex;
}
+ @Override
+ public int getPageHeaderSize() {
+ return nextLeafOff;
+ }
}
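Frame insertion is now a two-step protocol: findTupleIndex() resolves the slot for the new tuple, and insert() takes that index, as the rewritten split() code above already does. In caller form, with frame, tuple and cmp as placeholders:

    int tupleIndex = targetFrame.findTupleIndex(tuple, cmp); // the NSM leaf/interior frames later in this
                                                             // patch throw BTreeException on duplicates here
    targetFrame.insert(tuple, cmp, tupleIndex);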
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMLeafFrameFactory.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrameFactory.java
similarity index 63%
copy from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMLeafFrameFactory.java
copy to hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrameFactory.java
index 202f31a..05b43d3 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMLeafFrameFactory.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrameFactory.java
@@ -16,20 +16,20 @@
package edu.uci.ics.hyracks.storage.am.btree.frames;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
-public class NSMLeafFrameFactory implements IBTreeLeafFrameFactory {
+public class BTreeFieldPrefixNSMLeafFrameFactory implements ITreeIndexFrameFactory {
private static final long serialVersionUID = 1L;
- private IBTreeTupleWriterFactory tupleWriterFactory;
+ private ITreeIndexTupleWriterFactory tupleWriterFactory;
- public NSMLeafFrameFactory(IBTreeTupleWriterFactory tupleWriterFactory) {
+ public BTreeFieldPrefixNSMLeafFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory) {
this.tupleWriterFactory = tupleWriterFactory;
}
@Override
- public IBTreeLeafFrame getFrame() {
- return new NSMLeafFrame(tupleWriterFactory.createTupleWriter());
+ public IBTreeLeafFrame createFrame() {
+ return new BTreeFieldPrefixNSMLeafFrame(tupleWriterFactory.createTupleWriter());
}
-}
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMInteriorFrame.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrame.java
similarity index 76%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMInteriorFrame.java
rename to hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrame.java
index 90d400d4..6025003 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMInteriorFrame.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrame.java
@@ -25,30 +25,31 @@
import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeFrame;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleWriter;
import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeException;
-import edu.uci.ics.hyracks.storage.am.btree.impls.FindTupleMode;
-import edu.uci.ics.hyracks.storage.am.btree.impls.FindTupleNoExactMatchPolicy;
-import edu.uci.ics.hyracks.storage.am.btree.impls.MultiComparator;
import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.btree.impls.SlotOffTupleOff;
-import edu.uci.ics.hyracks.storage.am.btree.impls.SpaceStatus;
-import edu.uci.ics.hyracks.storage.am.btree.impls.SplitKey;
+import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
+import edu.uci.ics.hyracks.storage.am.common.frames.TreeIndexNSMFrame;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.SlotOffTupleOff;
-public class NSMInteriorFrame extends NSMFrame implements IBTreeInteriorFrame {
+public class BTreeNSMInteriorFrame extends TreeIndexNSMFrame implements IBTreeInteriorFrame {
private static final int rightLeafOff = smFlagOff + 1;
private static final int childPtrSize = 4;
-
+
// private SimpleTupleReference cmpFrameTuple = new SimpleTupleReference();
- private IBTreeTupleReference cmpFrameTuple;
+ private ITreeIndexTupleReference cmpFrameTuple;
- public NSMInteriorFrame(IBTreeTupleWriter tupleWriter) {
- super(tupleWriter);
+ public BTreeNSMInteriorFrame(ITreeIndexTupleWriter tupleWriter) {
+ super(tupleWriter, new OrderedSlotManager());
cmpFrameTuple = tupleWriter.createTupleReference();
}
@@ -64,21 +65,20 @@
}
@Override
- public SpaceStatus hasSpaceInsert(ITupleReference tuple, MultiComparator cmp) {
+ public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple, MultiComparator cmp) {
int bytesRequired = tupleWriter.bytesRequired(tuple) + 8; // for the two
// childpointers
if (bytesRequired + slotManager.getSlotSize() <= buf.capacity() - buf.getInt(freeSpaceOff)
- (buf.getInt(tupleCountOff) * slotManager.getSlotSize()))
- return SpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
+ return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
else if (bytesRequired + slotManager.getSlotSize() <= buf.getInt(totalFreeSpaceOff))
- return SpaceStatus.SUFFICIENT_SPACE;
+ return FrameOpSpaceStatus.SUFFICIENT_SPACE;
else
- return SpaceStatus.INSUFFICIENT_SPACE;
+ return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
}
- @Override
- public void insert(ITupleReference tuple, MultiComparator cmp) throws Exception {
- frameTuple.setFieldCount(cmp.getKeyFieldCount());
+ public int findTupleIndex(ITupleReference tuple, MultiComparator cmp) throws Exception {
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_INCLUSIVE,
FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
int slotOff = slotManager.getSlotOff(tupleIndex);
@@ -87,46 +87,49 @@
if (tupleIndex < 0)
isDuplicate = false; // greater than all existing keys
else {
- frameTuple.resetByOffset(buf, slotManager.getTupleOff(slotOff));
+ frameTuple.resetByTupleOffset(buf, slotManager.getTupleOff(slotOff));
if (cmp.compare(tuple, frameTuple) != 0)
isDuplicate = false;
}
-
if (isDuplicate) {
throw new BTreeException("Trying to insert duplicate value into interior node.");
- } else {
- slotOff = slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
-
- int freeSpace = buf.getInt(freeSpaceOff);
- int bytesWritten = tupleWriter.writeTupleFields(tuple, 0, cmp.getKeyFieldCount(), buf, freeSpace);
- System.arraycopy(tuple.getFieldData(cmp.getKeyFieldCount() - 1), getLeftChildPageOff(tuple, cmp), buf
- .array(), freeSpace + bytesWritten, childPtrSize);
- int tupleSize = bytesWritten + childPtrSize;
-
- buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
- buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + tupleSize);
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - tupleSize - slotManager.getSlotSize());
-
- // did insert into the rightmost slot?
- if (slotOff == slotManager.getSlotEndOff()) {
- System.arraycopy(tuple.getFieldData(cmp.getKeyFieldCount() - 1), getLeftChildPageOff(tuple, cmp)
- + childPtrSize, buf.array(), rightLeafOff, childPtrSize);
- } else {
- // if slotOff has a right (slot-)neighbor then update its child
- // pointer
- // the only time when this is NOT the case, is when this is the
- // first tuple
- // (or when the splitkey goes into the rightmost slot but that
- // case was handled in the if above)
- if (buf.getInt(tupleCountOff) > 1) {
- int rightNeighborOff = slotOff - slotManager.getSlotSize();
- frameTuple.resetByOffset(buf, slotManager.getTupleOff(rightNeighborOff));
- System.arraycopy(tuple.getFieldData(0), getLeftChildPageOff(tuple, cmp) + childPtrSize,
- buf.array(), getLeftChildPageOff(frameTuple, cmp), childPtrSize);
- }
- }
}
+ return tupleIndex;
}
+
+ @Override
+ public void insert(ITupleReference tuple, MultiComparator cmp, int tupleIndex) throws Exception {
+ int slotOff = slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
+ int freeSpace = buf.getInt(freeSpaceOff);
+ int bytesWritten = tupleWriter.writeTupleFields(tuple, 0, cmp.getKeyFieldCount(), buf, freeSpace);
+ System.arraycopy(tuple.getFieldData(cmp.getKeyFieldCount() - 1), getLeftChildPageOff(tuple, cmp), buf
+ .array(), freeSpace + bytesWritten, childPtrSize);
+ int tupleSize = bytesWritten + childPtrSize;
+
+ buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
+ buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + tupleSize);
+ buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - tupleSize - slotManager.getSlotSize());
+
+ // did insert into the rightmost slot?
+ if (slotOff == slotManager.getSlotEndOff()) {
+ System.arraycopy(tuple.getFieldData(cmp.getKeyFieldCount() - 1), getLeftChildPageOff(tuple, cmp)
+ + childPtrSize, buf.array(), rightLeafOff, childPtrSize);
+ } else {
+ // if slotOff has a right (slot-)neighbor then update its child
+ // pointer
+ // the only time when this is NOT the case, is when this is the
+ // first tuple
+ // (or when the splitkey goes into the rightmost slot but that
+ // case was handled in the if above)
+ if (buf.getInt(tupleCountOff) > 1) {
+ int rightNeighborOff = slotOff - slotManager.getSlotSize();
+ frameTuple.resetByTupleOffset(buf, slotManager.getTupleOff(rightNeighborOff));
+ System.arraycopy(tuple.getFieldData(0), getLeftChildPageOff(tuple, cmp) + childPtrSize,
+ buf.array(), getLeftChildPageOff(frameTuple, cmp), childPtrSize);
+ }
+ }
+ }
+
@Override
public void insertSorted(ITupleReference tuple, MultiComparator cmp) throws HyracksDataException {
@@ -144,26 +147,17 @@
}
@Override
- public int split(IBTreeFrame rightFrame, ITupleReference tuple, MultiComparator cmp, SplitKey splitKey)
+ public int split(ITreeIndexFrame rightFrame, ITupleReference tuple, MultiComparator cmp, ISplitKey splitKey)
throws Exception {
// before doing anything check if key already exists
frameTuple.setFieldCount(cmp.getKeyFieldCount());
- int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_EXACT,
- FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
- int slotOff = slotManager.getSlotOff(tupleIndex);
- if (tupleIndex >= 0) {
- frameTuple.resetByOffset(buf, slotManager.getTupleOff(slotOff));
- if (cmp.compare(tuple, frameTuple) == 0) {
- throw new BTreeException("Inserting duplicate key in interior node during split");
- }
- }
-
+
ByteBuffer right = rightFrame.getBuffer();
int tupleCount = buf.getInt(tupleCountOff);
int tuplesToLeft = (tupleCount / 2) + (tupleCount % 2);
- IBTreeFrame targetFrame = null;
- frameTuple.resetByOffset(buf, getTupleOffset(tuplesToLeft - 1));
+ ITreeIndexFrame targetFrame = null;
+ frameTuple.resetByTupleOffset(buf, getTupleOffset(tuplesToLeft - 1));
if (cmp.compare(tuple, frameTuple) <= 0) {
targetFrame = this;
} else {
@@ -189,18 +183,18 @@
// copy data to be inserted, we need this because creating the splitkey
// will overwrite the data param (data points to same memory as
// splitKey.getData())
- SplitKey savedSplitKey = splitKey.duplicate(tupleWriter.createTupleReference());
+ ISplitKey savedSplitKey = splitKey.duplicate(tupleWriter.createTupleReference());
// set split key to be highest value in left page
int tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
- frameTuple.resetByOffset(buf, tupleOff);
+ frameTuple.resetByTupleOffset(buf, tupleOff);
int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
splitKey.initData(splitKeySize);
tupleWriter.writeTupleFields(frameTuple, 0, cmp.getKeyFieldCount(), splitKey.getBuffer(), 0);
- splitKey.getTuple().resetByOffset(splitKey.getBuffer(), 0);
+ splitKey.getTuple().resetByTupleOffset(splitKey.getBuffer(), 0);
int deleteTupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
- frameTuple.resetByOffset(buf, deleteTupleOff);
+ frameTuple.resetByTupleOffset(buf, deleteTupleOff);
buf.putInt(rightLeafOff, buf.getInt(getLeftChildPageOff(frameTuple, cmp)));
buf.putInt(tupleCountOff, tuplesToLeft - 1);
@@ -209,13 +203,14 @@
compact(cmp);
// insert key
- targetFrame.insert(savedSplitKey.getTuple(), cmp);
-
+ int targetTupleIndex = targetFrame.findTupleIndex(savedSplitKey.getTuple(), cmp);
+ targetFrame.insert(savedSplitKey.getTuple(), cmp, targetTupleIndex);
+
return 0;
}
@Override
- public void compact(MultiComparator cmp) {
+ public boolean compact(MultiComparator cmp) {
resetSpaceParams();
frameTuple.setFieldCount(cmp.getKeyFieldCount());
@@ -234,7 +229,7 @@
for (int i = 0; i < sortedTupleOffs.size(); i++) {
int tupleOff = sortedTupleOffs.get(i).tupleOff;
- frameTuple.resetByOffset(buf, tupleOff);
+ frameTuple.resetByTupleOffset(buf, tupleOff);
int tupleEndOff = frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
+ frameTuple.getFieldLength(frameTuple.getFieldCount() - 1);
@@ -247,6 +242,8 @@
buf.putInt(freeSpaceOff, freeSpace);
buf.putInt(totalFreeSpaceOff, buf.capacity() - freeSpace - tupleCount * slotManager.getSlotSize());
+
+ return false;
}
@Override
@@ -294,14 +291,14 @@
return buf.getInt(rightLeafOff);
} else {
int origTupleOff = slotManager.getTupleOff(slotOff);
- cmpFrameTuple.resetByOffset(buf, origTupleOff);
+ cmpFrameTuple.resetByTupleOffset(buf, origTupleOff);
int cmpTupleOff = origTupleOff;
if (pred.isForward()) {
int maxSlotOff = buf.capacity();
slotOff += slotManager.getSlotSize();
while (slotOff < maxSlotOff) {
cmpTupleOff = slotManager.getTupleOff(slotOff);
- frameTuple.resetByOffset(buf, cmpTupleOff);
+ frameTuple.resetByTupleOffset(buf, cmpTupleOff);
if (targetCmp.compare(cmpFrameTuple, frameTuple) != 0)
break;
slotOff += slotManager.getSlotSize();
@@ -312,15 +309,15 @@
slotOff -= slotManager.getSlotSize();
while (slotOff > minSlotOff) {
cmpTupleOff = slotManager.getTupleOff(slotOff);
- frameTuple.resetByOffset(buf, cmpTupleOff);
+ frameTuple.resetByTupleOffset(buf, cmpTupleOff);
if (targetCmp.compare(cmpFrameTuple, frameTuple) != 0)
break;
slotOff -= slotManager.getSlotSize();
}
slotOff += slotManager.getSlotSize();
}
-
- frameTuple.resetByOffset(buf, slotManager.getTupleOff(slotOff));
+
+ frameTuple.resetByTupleOffset(buf, slotManager.getTupleOff(slotOff));
int childPageOff = getLeftChildPageOff(frameTuple, srcCmp);
return buf.getInt(childPageOff);
}
@@ -337,14 +334,14 @@
if (tupleIndex < 0) {
tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
- frameTuple.resetByOffset(buf, tupleOff);
+ frameTuple.resetByTupleOffset(buf, tupleOff);
keySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
// copy new rightmost pointer
System.arraycopy(buf.array(), tupleOff + keySize, buf.array(), rightLeafOff, childPtrSize);
} else {
tupleOff = slotManager.getTupleOff(slotOff);
- frameTuple.resetByOffset(buf, tupleOff);
+ frameTuple.resetByTupleOffset(buf, tupleOff);
keySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
// perform deletion (we just do a memcpy to overwrite the slot)
int slotStartOff = slotManager.getSlotEndOff();
@@ -368,7 +365,7 @@
public int getLeftmostChildPageId(MultiComparator cmp) {
int tupleOff = slotManager.getTupleOff(slotManager.getSlotStartOff());
frameTuple.setFieldCount(cmp.getKeyFieldCount());
- frameTuple.resetByOffset(buf, tupleOff);
+ frameTuple.resetByTupleOffset(buf, tupleOff);
int childPageOff = getLeftChildPageOff(frameTuple, cmp);
return buf.getInt(childPageOff);
}
@@ -390,7 +387,7 @@
int tupleCount = buf.getInt(tupleCountOff);
for (int i = 0; i < tupleCount; i++) {
int tupleOff = slotManager.getTupleOff(slotManager.getSlotOff(i));
- frameTuple.resetByOffset(buf, tupleOff);
+ frameTuple.resetByTupleOffset(buf, tupleOff);
int intVal = getInt(buf.array(), frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
+ frameTuple.getFieldLength(frameTuple.getFieldCount() - 1));
ret.add(intVal);
@@ -408,7 +405,7 @@
int slotOff = slotManager.getSlotEndOff();
int tupleOff = slotManager.getTupleOff(slotOff);
frameTuple.setFieldCount(cmp.getKeyFieldCount());
- frameTuple.resetByOffset(buf, tupleOff);
+ frameTuple.resetByTupleOffset(buf, tupleOff);
int keySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
System.arraycopy(buf.array(), tupleOff + keySize, buf.array(), rightLeafOff, childPtrSize);
@@ -447,4 +444,9 @@
strBuilder.append("\n");
return strBuilder.toString();
}
+
+ @Override
+ public int getPageHeaderSize() {
+ return rightLeafOff;
+ }
}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMInteriorFrameFactory.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrameFactory.java
similarity index 64%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMInteriorFrameFactory.java
rename to hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrameFactory.java
index 86fa94f..6b30ee0 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMInteriorFrameFactory.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrameFactory.java
@@ -16,20 +16,20 @@
package edu.uci.ics.hyracks.storage.am.btree.frames;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
-public class NSMInteriorFrameFactory implements IBTreeInteriorFrameFactory {
+public class BTreeNSMInteriorFrameFactory implements ITreeIndexFrameFactory {
private static final long serialVersionUID = 1L;
- private IBTreeTupleWriterFactory tupleWriterFactory;
+ private ITreeIndexTupleWriterFactory tupleWriterFactory;
- public NSMInteriorFrameFactory(IBTreeTupleWriterFactory tupleWriterFactory) {
+ public BTreeNSMInteriorFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory) {
this.tupleWriterFactory = tupleWriterFactory;
}
@Override
- public IBTreeInteriorFrame getFrame() {
- return new NSMInteriorFrame(tupleWriterFactory.createTupleWriter());
+ public IBTreeInteriorFrame createFrame() {
+ return new BTreeNSMInteriorFrame(tupleWriterFactory.createTupleWriter());
}
}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMLeafFrame.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrame.java
similarity index 69%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMLeafFrame.java
rename to hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrame.java
index fd8f826..85fbec9 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMLeafFrame.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrame.java
@@ -19,22 +19,23 @@
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeFrame;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleWriter;
import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeException;
-import edu.uci.ics.hyracks.storage.am.btree.impls.FindTupleMode;
-import edu.uci.ics.hyracks.storage.am.btree.impls.FindTupleNoExactMatchPolicy;
-import edu.uci.ics.hyracks.storage.am.btree.impls.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.btree.impls.SplitKey;
+import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.frames.TreeIndexNSMFrame;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-public class NSMLeafFrame extends NSMFrame implements IBTreeLeafFrame {
+public class BTreeNSMLeafFrame extends TreeIndexNSMFrame implements IBTreeLeafFrame {
protected static final int prevLeafOff = smFlagOff + 1;
protected static final int nextLeafOff = prevLeafOff + 4;
- public NSMLeafFrame(IBTreeTupleWriter tupleWriter) {
- super(tupleWriter);
+ public BTreeNSMLeafFrame(ITreeIndexTupleWriter tupleWriter) {
+ super(tupleWriter, new OrderedSlotManager());
}
@Override
@@ -65,8 +66,8 @@
}
@Override
- public void insert(ITupleReference tuple, MultiComparator cmp) throws Exception {
- frameTuple.setFieldCount(cmp.getFieldCount());
+ public int findTupleIndex(ITupleReference tuple, MultiComparator cmp) throws Exception {
+ frameTuple.setFieldCount(cmp.getFieldCount());
int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_INCLUSIVE,
FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
int slotOff = slotManager.getSlotOff(tupleIndex);
@@ -75,23 +76,25 @@
if (tupleIndex < 0)
isDuplicate = false; // greater than all existing keys
else {
- frameTuple.resetByOffset(buf, slotManager.getTupleOff(slotOff));
+ frameTuple.resetByTupleOffset(buf, slotManager.getTupleOff(slotOff));
if (cmp.compare(tuple, frameTuple) != 0)
isDuplicate = false;
}
-
if (isDuplicate) {
throw new BTreeException("Trying to insert duplicate value into leaf of unique index");
- } else {
- slotOff = slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
-
- int freeSpace = buf.getInt(freeSpaceOff);
- int bytesWritten = tupleWriter.writeTuple(tuple, buf, freeSpace);
-
- buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
- buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
}
+
+ return tupleIndex;
+ }
+
+ @Override
+ public void insert(ITupleReference tuple, MultiComparator cmp, int tupleIndex) throws Exception {
+ slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
+ int freeSpace = buf.getInt(freeSpaceOff);
+ int bytesWritten = tupleWriter.writeTuple(tuple, buf, freeSpace);
+ buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
+ buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
+ buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
}
@Override
@@ -105,29 +108,19 @@
}
@Override
- public int split(IBTreeFrame rightFrame, ITupleReference tuple, MultiComparator cmp, SplitKey splitKey)
+ public int split(ITreeIndexFrame rightFrame, ITupleReference tuple, MultiComparator cmp, ISplitKey splitKey)
throws Exception {
frameTuple.setFieldCount(cmp.getFieldCount());
-
- // before doing anything check if key already exists
- int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_EXACT,
- FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
- if (tupleIndex >= 0) {
- frameTuple.resetByTupleIndex(this, tupleIndex);
- if (cmp.compare(tuple, frameTuple) == 0) {
- throw new BTreeException("Inserting duplicate key into unique index");
- }
- }
-
+
ByteBuffer right = rightFrame.getBuffer();
int tupleCount = getTupleCount();
int tuplesToLeft;
int mid = tupleCount / 2;
- IBTreeFrame targetFrame = null;
+ ITreeIndexFrame targetFrame = null;
int tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff() + slotManager.getSlotSize() * mid);
- frameTuple.resetByOffset(buf, tupleOff);
+ frameTuple.resetByTupleOffset(buf, tupleOff);
if (cmp.compare(tuple, frameTuple) >= 0) {
tuplesToLeft = mid + (tupleCount % 2);
targetFrame = rightFrame;
@@ -156,17 +149,18 @@
compact(cmp);
// insert last key
- targetFrame.insert(tuple, cmp);
+ int targetTupleIndex = targetFrame.findTupleIndex(tuple, cmp);
+ targetFrame.insert(tuple, cmp, targetTupleIndex);
// set split key to be highest value in left page
tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
- frameTuple.resetByOffset(buf, tupleOff);
+ frameTuple.resetByTupleOffset(buf, tupleOff);
int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
splitKey.initData(splitKeySize);
tupleWriter.writeTupleFields(frameTuple, 0, cmp.getKeyFieldCount(), splitKey.getBuffer(), 0);
- splitKey.getTuple().resetByOffset(splitKey.getBuffer(), 0);
-
+ splitKey.getTuple().resetByTupleOffset(splitKey.getBuffer(), 0);
+
return 0;
}
@@ -177,13 +171,18 @@
}
@Override
- public IBTreeTupleReference createTupleReference() {
+ public ITreeIndexTupleReference createTupleReference() {
return tupleWriter.createTupleReference();
}
@Override
- public int findTupleIndex(ITupleReference searchKey, IBTreeTupleReference pageTuple, MultiComparator cmp,
+ public int findTupleIndex(ITupleReference searchKey, ITreeIndexTupleReference pageTuple, MultiComparator cmp,
FindTupleMode ftm, FindTupleNoExactMatchPolicy ftp) {
return slotManager.findTupleIndex(searchKey, pageTuple, cmp, ftm, ftp);
}
+
+ @Override
+ public int getPageHeaderSize() {
+ return nextLeafOff;
+ }
}
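The hunks above split the frame's old single-step insert into a findTupleIndex/insert pair: findTupleIndex locates the target slot and rejects duplicate keys (the BTreeException above), while the new insert(tuple, cmp, tupleIndex) only writes the tuple at a slot the caller has already computed. A minimal caller-side sketch, assuming a leaf frame, a tuple, and a MultiComparator are already set up; only the two signatures are taken from this diff:

    // Sketch only: 'leafFrame', 'tuple' and 'cmp' are assumed to exist.
    int tupleIndex = leafFrame.findTupleIndex(tuple, cmp); // throws BTreeException on a duplicate key
    leafFrame.insert(tuple, cmp, tupleIndex);              // writes at the precomputed slot

BTree.java (further down in this diff) recomputes the index after compact() whenever the slot directory changes, which is why the index is passed in rather than recomputed inside insert.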
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMLeafFrameFactory.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrameFactory.java
similarity index 64%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMLeafFrameFactory.java
rename to hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrameFactory.java
index 202f31a..d59b391 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMLeafFrameFactory.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrameFactory.java
@@ -16,20 +16,20 @@
package edu.uci.ics.hyracks.storage.am.btree.frames;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
-public class NSMLeafFrameFactory implements IBTreeLeafFrameFactory {
+public class BTreeNSMLeafFrameFactory implements ITreeIndexFrameFactory {
private static final long serialVersionUID = 1L;
- private IBTreeTupleWriterFactory tupleWriterFactory;
+ private ITreeIndexTupleWriterFactory tupleWriterFactory;
- public NSMLeafFrameFactory(IBTreeTupleWriterFactory tupleWriterFactory) {
+ public BTreeNSMLeafFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory) {
this.tupleWriterFactory = tupleWriterFactory;
}
@Override
- public IBTreeLeafFrame getFrame() {
- return new NSMLeafFrame(tupleWriterFactory.createTupleWriter());
+ public IBTreeLeafFrame createFrame() {
+ return new BTreeNSMLeafFrame(tupleWriterFactory.createTupleWriter());
}
}
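The factory rename also retargets it at the shared ITreeIndexFrameFactory interface, with getFrame() becoming createFrame(). A hedged wiring sketch; the tuple writer factory is a placeholder supplied by the caller, and only the constructor and createFrame() signatures come from this diff:

    // Sketch only: a concrete ITreeIndexTupleWriterFactory is assumed to be provided by the caller.
    static IBTreeLeafFrame newLeafFrame(ITreeIndexTupleWriterFactory tupleWriterFactory) {
        ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
        // callers that only see ITreeIndexFrameFactory cast the result back, as BTree.java does below
        return (IBTreeLeafFrame) leafFrameFactory.createFrame();
    }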
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/FieldPrefixNSMLeafFrameFactory.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/FieldPrefixNSMLeafFrameFactory.java
deleted file mode 100644
index a3084ab..0000000
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/FieldPrefixNSMLeafFrameFactory.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.frames;
-
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleWriterFactory;
-
-public class FieldPrefixNSMLeafFrameFactory implements IBTreeLeafFrameFactory {
-
- private static final long serialVersionUID = 1L;
- private IBTreeTupleWriterFactory tupleWriterFactory;
-
- public FieldPrefixNSMLeafFrameFactory(IBTreeTupleWriterFactory tupleWriterFactory) {
- this.tupleWriterFactory = tupleWriterFactory;
- }
-
- @Override
- public IBTreeLeafFrame getFrame() {
- return new FieldPrefixNSMLeafFrame(tupleWriterFactory.createTupleWriter());
- }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/OrderedSlotManager.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/OrderedSlotManager.java
similarity index 66%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/OrderedSlotManager.java
rename to hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/OrderedSlotManager.java
index 258161b..89a70e0 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/OrderedSlotManager.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/OrderedSlotManager.java
@@ -13,20 +13,24 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.impls;
+package edu.uci.ics.hyracks.storage.am.btree.frames;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.ISlotManager;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.frames.AbstractSlotManager;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-public class OrderedSlotManager implements ISlotManager {
-
- private static final int slotSize = 4;
- private IBTreeFrame frame;
-
- @Override
- public int findTupleIndex(ITupleReference searchKey, IBTreeTupleReference frameTuple, MultiComparator multiCmp,
+public class OrderedSlotManager extends AbstractSlotManager {
+
+ @Override
+ public int findTupleIndex(ITupleReference searchKey, ITreeIndexTupleReference frameTuple, MultiComparator multiCmp,
FindTupleMode mode, FindTupleNoExactMatchPolicy matchPolicy) {
if (frame.getTupleCount() <= 0)
return -1;
@@ -34,12 +38,12 @@
int mid;
int begin = 0;
int end = frame.getTupleCount() - 1;
-
+
while (begin <= end) {
mid = (begin + end) / 2;
- frameTuple.resetByTupleIndex(frame, mid);
-
- int cmp = multiCmp.compare(searchKey, frameTuple);
+ frameTuple.resetByTupleIndex(frame, mid);
+
+ int cmp = multiCmp.compare(searchKey, frameTuple);
if (cmp < 0) {
end = mid - 1;
} else if (cmp > 0) {
@@ -77,32 +81,7 @@
return -1;
}
}
-
- @Override
- public int getTupleOff(int offset) {
- return frame.getBuffer().getInt(offset);
- }
-
- @Override
- public void setSlot(int offset, int value) {
- frame.getBuffer().putInt(offset, value);
- }
-
- @Override
- public int getSlotEndOff() {
- return frame.getBuffer().capacity() - (frame.getTupleCount() * slotSize);
- }
-
- @Override
- public int getSlotStartOff() {
- return frame.getBuffer().capacity() - slotSize;
- }
-
- @Override
- public int getSlotSize() {
- return slotSize;
- }
-
+
@Override
public int insertSlot(int tupleIndex, int tupleOff) {
int slotOff = getSlotOff(tupleIndex);
@@ -119,14 +98,4 @@
return slotOff;
}
}
-
- @Override
- public void setFrame(IBTreeFrame frame) {
- this.frame = frame;
- }
-
- @Override
- public int getSlotOff(int tupleIndex) {
- return getSlotStartOff() - tupleIndex * slotSize;
- }
}
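The offset accessors deleted here move into the shared AbstractSlotManager (not shown in this diff); the slot directory itself still grows backwards from the end of the page, four bytes per slot. A worked example of the removed formulas, assuming an 8 KB page holding three tuples:

    // Sketch only: restates the arithmetic deleted above; AbstractSlotManager is assumed to keep the same layout.
    int capacity = 8192, slotSize = 4, tupleCount = 3;
    int slotStartOff = capacity - slotSize;              // 8188: slot of tuple 0, last 4 bytes of the page
    int slotEndOff   = capacity - tupleCount * slotSize; // 8180: low end of the slot directory
    int slotOff2     = slotStartOff - 2 * slotSize;      // 8180: slot of tuple 2 (getSlotOff(2))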
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java
index 7a8ee73..2831ff6 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java
@@ -22,35 +22,47 @@
import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeCursor;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeFrame;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeMetaDataFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleWriter;
-import edu.uci.ics.hyracks.storage.am.btree.frames.NSMInteriorFrame;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.api.IndexType;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
+import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOpContext;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
-public class BTree {
+public class BTree implements ITreeIndex {
+
+ public static final float DEFAULT_FILL_FACTOR = 0.7f;
private final static int RESTART_OP = Integer.MIN_VALUE;
private final static int MAX_RESTARTS = 10;
- private final int metaDataPage = 0; // page containing meta data, e.g.,
- // maxPage
- private final int rootPage = 1; // the root page never changes
+ // the root page never changes
+ private final int rootPage = 1;
+
+ private final IFreePageManager freePageManager;
private boolean created = false;
private boolean loaded = false;
private final IBufferCache bufferCache;
private int fileId;
- private final IBTreeInteriorFrameFactory interiorFrameFactory;
- private final IBTreeLeafFrameFactory leafFrameFactory;
+ private final ITreeIndexFrameFactory interiorFrameFactory;
+ private final ITreeIndexFrameFactory leafFrameFactory;
private final MultiComparator cmp;
private final ReadWriteLock treeLatch;
private final RangePredicate diskOrderScanPredicate;
@@ -90,17 +102,19 @@
return strBuilder.toString();
}
- public BTree(IBufferCache bufferCache, IBTreeInteriorFrameFactory interiorFrameFactory,
- IBTreeLeafFrameFactory leafFrameFactory, MultiComparator cmp) {
+ public BTree(IBufferCache bufferCache, IFreePageManager freePageManager,
+ ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory leafFrameFactory, MultiComparator cmp) {
this.bufferCache = bufferCache;
this.interiorFrameFactory = interiorFrameFactory;
this.leafFrameFactory = leafFrameFactory;
this.cmp = cmp;
+ this.freePageManager = freePageManager;
this.treeLatch = new ReentrantReadWriteLock(true);
this.diskOrderScanPredicate = new RangePredicate(true, null, null, true, true, cmp, cmp);
}
- public void create(int fileId, IBTreeLeafFrame leafFrame, IBTreeMetaDataFrame metaFrame) throws Exception {
+ @Override
+ public void create(int fileId, ITreeIndexFrame leafFrame, ITreeIndexMetaDataFrame metaFrame) throws Exception {
if (created)
return;
@@ -112,22 +126,7 @@
if (created)
return;
- // initialize meta data page
- ICachedPage metaNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, metaDataPage), false);
- pins++;
-
- metaNode.acquireWriteLatch();
- writeLatchesAcquired++;
- try {
- metaFrame.setPage(metaNode);
- metaFrame.initBuffer((byte) -1);
- metaFrame.setMaxPage(rootPage);
- } finally {
- metaNode.releaseWriteLatch();
- writeLatchesReleased++;
- bufferCache.unpin(metaNode);
- unpins++;
- }
+ freePageManager.init(metaFrame, rootPage);
// initialize root page
ICachedPage rootNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rootPage), true);
@@ -160,158 +159,16 @@
fileId = -1;
}
- private int getFreePage(IBTreeMetaDataFrame metaFrame) throws HyracksDataException {
- ICachedPage metaNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, metaDataPage), false);
- pins++;
-
- metaNode.acquireWriteLatch();
- writeLatchesAcquired++;
-
- int freePage = -1;
- try {
- metaFrame.setPage(metaNode);
- freePage = metaFrame.getFreePage();
- if (freePage < 0) { // no free page entry on this page
- int nextPage = metaFrame.getNextPage();
- if (nextPage > 0) { // sibling may have free pages
- ICachedPage nextNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, nextPage), false);
- pins++;
-
- nextNode.acquireWriteLatch();
- writeLatchesAcquired++;
- // we copy over the free space entries of nextpage into the
- // first meta page (metaDataPage)
- // we need to link the first page properly to the next page
- // of nextpage
- try {
- // remember entries that remain unchanged
- int maxPage = metaFrame.getMaxPage();
-
- // copy entire page (including sibling pointer, free
- // page entries, and all other info)
- // after this copy nextPage is considered a free page
- System.arraycopy(nextNode.getBuffer().array(), 0, metaNode.getBuffer().array(), 0, nextNode
- .getBuffer().capacity());
-
- // reset unchanged entry
- metaFrame.setMaxPage(maxPage);
-
- freePage = metaFrame.getFreePage();
- // sibling also has no free pages, this "should" not
- // happen, but we deal with it anyway just to be safe
- if (freePage < 0) {
- freePage = nextPage;
- } else {
- metaFrame.addFreePage(nextPage);
- }
- } finally {
- nextNode.releaseWriteLatch();
- writeLatchesReleased++;
- bufferCache.unpin(nextNode);
- unpins++;
- }
- } else {
- freePage = metaFrame.getMaxPage();
- freePage++;
- metaFrame.setMaxPage(freePage);
- }
- }
- } finally {
- metaNode.releaseWriteLatch();
- writeLatchesReleased++;
- bufferCache.unpin(metaNode);
- unpins++;
- }
-
- return freePage;
- }
-
private void addFreePages(BTreeOpContext ctx) throws Exception {
for (int i = 0; i < ctx.freePages.size(); i++) {
- addFreePage(ctx.metaFrame, ctx.freePages.get(i));
+ // root page is special, don't add it to free pages
+ if (ctx.freePages.get(i) != rootPage) {
+ freePageManager.addFreePage(ctx.metaFrame, ctx.freePages.get(i));
+ }
}
ctx.freePages.clear();
}
- private void addFreePage(IBTreeMetaDataFrame metaFrame, int freePage) throws Exception {
- // root page is special, don't add it to free pages
- if (freePage == rootPage)
- return;
-
- ICachedPage metaNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, metaDataPage), false);
- pins++;
-
- metaNode.acquireWriteLatch();
- writeLatchesAcquired++;
-
- metaFrame.setPage(metaNode);
-
- try {
- if (metaFrame.hasSpace()) {
- metaFrame.addFreePage(freePage);
- } else {
- // allocate a new page in the chain of meta pages
- int newPage = metaFrame.getFreePage();
- if (newPage < 0) {
- throw new Exception("Inconsistent Meta Page State. It has no space, but it also has no entries.");
- }
-
- ICachedPage newNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, newPage), false);
- pins++;
-
- newNode.acquireWriteLatch();
- writeLatchesAcquired++;
-
- try {
- int metaMaxPage = metaFrame.getMaxPage();
-
- // copy metaDataPage to newNode
- System.arraycopy(metaNode.getBuffer().array(), 0, newNode.getBuffer().array(), 0, metaNode
- .getBuffer().capacity());
-
- metaFrame.initBuffer(-1);
- metaFrame.setNextPage(newPage);
- metaFrame.setMaxPage(metaMaxPage);
- metaFrame.addFreePage(freePage);
- } finally {
- newNode.releaseWriteLatch();
- writeLatchesReleased++;
-
- bufferCache.unpin(newNode);
- unpins++;
- }
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- metaNode.releaseWriteLatch();
- writeLatchesReleased++;
-
- bufferCache.unpin(metaNode);
- unpins++;
- }
- }
-
- public int getMaxPage(IBTreeMetaDataFrame metaFrame) throws HyracksDataException {
- ICachedPage metaNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, metaDataPage), false);
- pins++;
-
- metaNode.acquireWriteLatch();
- writeLatchesAcquired++;
- int maxPage = -1;
- try {
- metaFrame.setPage(metaNode);
- maxPage = metaFrame.getMaxPage();
- } finally {
- metaNode.releaseWriteLatch();
- writeLatchesReleased++;
- bufferCache.unpin(metaNode);
- unpins++;
- }
-
- return maxPage;
- }
-
public void printTree(IBTreeLeafFrame leafFrame, IBTreeInteriorFrame interiorFrame, ISerializerDeserializer[] fields)
throws Exception {
printTree(rootPage, null, false, leafFrame, interiorFrame, fields);
@@ -352,7 +209,7 @@
System.out.format(keyString);
if (!interiorFrame.isLeaf()) {
- ArrayList<Integer> children = ((NSMInteriorFrame) (interiorFrame)).getChildren(cmp);
+ ArrayList<Integer> children = ((BTreeNSMInteriorFrame) (interiorFrame)).getChildren(cmp);
for (int i = 0; i < children.size(); i++) {
printTree(children.get(i), node, i == children.size() - 1, leafFrame, interiorFrame, fields);
@@ -374,27 +231,15 @@
}
}
- public void diskOrderScan(DiskOrderScanCursor cursor, IBTreeLeafFrame leafFrame, IBTreeMetaDataFrame metaFrame)
- throws HyracksDataException {
+ @Override
+ public void diskOrderScan(ITreeIndexCursor icursor, ITreeIndexFrame leafFrame, ITreeIndexMetaDataFrame metaFrame,
+ IndexOpContext ictx) throws HyracksDataException {
+ TreeDiskOrderScanCursor cursor = (TreeDiskOrderScanCursor) icursor;
+ BTreeOpContext ctx = (BTreeOpContext) ictx;
+ ctx.reset();
+
int currentPageId = rootPage + 1;
- int maxPageId = -1;
-
- ICachedPage metaNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, metaDataPage), false);
- pins++;
-
- metaNode.acquireReadLatch();
- readLatchesAcquired++;
-
- try {
- metaFrame.setPage(metaNode);
- maxPageId = metaFrame.getMaxPage();
- } finally {
- metaNode.releaseReadLatch();
- readLatchesAcquired++;
-
- bufferCache.unpin(metaNode);
- unpins++;
- }
+ int maxPageId = freePageManager.getMaxPage(metaFrame);
ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, currentPageId), false);
page.acquireReadLatch();
@@ -402,10 +247,11 @@
cursor.setFileId(fileId);
cursor.setCurrentPageId(currentPageId);
cursor.setMaxPageId(maxPageId);
- cursor.open(page, diskOrderScanPredicate);
+ ctx.cursorInitialState.setPage(page);
+ cursor.open(ctx.cursorInitialState, diskOrderScanPredicate);
}
- public void search(IBTreeCursor cursor, RangePredicate pred, BTreeOpContext ctx) throws Exception {
+ public void search(ITreeIndexCursor cursor, RangePredicate pred, BTreeOpContext ctx) throws Exception {
ctx.reset();
ctx.pred = pred;
ctx.cursor = cursor;
@@ -468,20 +314,21 @@
currentLevel++;
// make sure the root is always at the same level
- ICachedPage leftNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, ctx.splitKey.getLeftPage()), false);
+ ICachedPage leftNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, ctx.splitKey.getLeftPage()),
+ false);
pins++;
leftNode.acquireWriteLatch(); // TODO: think about whether latching is
// really required
writeLatchesAcquired++;
try {
- ICachedPage rightNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, ctx.splitKey.getRightPage()),
- false);
+ ICachedPage rightNode = bufferCache.pin(
+ BufferedFileHandle.getDiskPageId(fileId, ctx.splitKey.getRightPage()), false);
pins++;
rightNode.acquireWriteLatch(); // TODO: think about whether latching
// is really required
writeLatchesAcquired++;
try {
- int newLeftId = getFreePage(ctx.metaFrame);
+ int newLeftId = freePageManager.getFreePage(ctx.metaFrame);
ICachedPage newLeftNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, newLeftId), true);
pins++;
newLeftNode.acquireWriteLatch(); // TODO: think about whether
@@ -507,7 +354,8 @@
ctx.interiorFrame.setSmFlag(true); // will be cleared later
// in unsetSmPages
ctx.splitKey.setLeftPage(newLeftId);
- ctx.interiorFrame.insert(ctx.splitKey.getTuple(), cmp);
+ int targetTupleIndex = ctx.interiorFrame.findTupleIndex(ctx.splitKey.getTuple(), cmp);
+ ctx.interiorFrame.insert(ctx.splitKey.getTuple(), cmp, targetTupleIndex);
} finally {
newLeftNode.releaseWriteLatch();
writeLatchesReleased++;
@@ -528,7 +376,9 @@
}
}
- public void insert(ITupleReference tuple, BTreeOpContext ctx) throws Exception {
+ @Override
+ public void insert(ITupleReference tuple, IndexOpContext ictx) throws Exception {
+ BTreeOpContext ctx = (BTreeOpContext) ictx;
ctx.reset();
ctx.pred.setLowKeyComparator(cmp);
ctx.pred.setHighKeyComparator(cmp);
@@ -566,20 +416,24 @@
private void insertLeaf(ICachedPage node, int pageId, ITupleReference tuple, BTreeOpContext ctx) throws Exception {
ctx.leafFrame.setPage(node);
ctx.leafFrame.setPageTupleFieldCount(cmp.getFieldCount());
- SpaceStatus spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple, cmp);
+
+ int targetTupleIndex = ctx.leafFrame.findTupleIndex(tuple, cmp);
+ FrameOpSpaceStatus spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple, cmp);
switch (spaceStatus) {
case SUFFICIENT_CONTIGUOUS_SPACE: {
// System.out.println("SUFFICIENT_CONTIGUOUS_SPACE");
- ctx.leafFrame.insert(tuple, cmp);
+ ctx.leafFrame.insert(tuple, cmp, targetTupleIndex);
ctx.splitKey.reset();
}
break;
case SUFFICIENT_SPACE: {
// System.out.println("SUFFICIENT_SPACE");
- ctx.leafFrame.compact(cmp);
- ctx.leafFrame.insert(tuple, cmp);
+ boolean slotsChanged = ctx.leafFrame.compact(cmp);
+ if (slotsChanged)
+ targetTupleIndex = ctx.leafFrame.findTupleIndex(tuple, cmp);
+ ctx.leafFrame.insert(tuple, cmp, targetTupleIndex);
ctx.splitKey.reset();
}
break;
@@ -595,8 +449,8 @@
if (reCompressed)
spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple, cmp);
- if (spaceStatus == SpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE) {
- ctx.leafFrame.insert(tuple, cmp);
+ if (spaceStatus == FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE) {
+ ctx.leafFrame.insert(tuple, cmp, targetTupleIndex);
ctx.splitKey.reset();
usefulCompression++;
@@ -610,7 +464,8 @@
int rightSiblingPageId = ctx.leafFrame.getNextLeaf();
ICachedPage rightSibling = null;
if (rightSiblingPageId > 0) {
- rightSibling = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rightSiblingPageId), false);
+ rightSibling = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rightSiblingPageId),
+ false);
pins++;
}
@@ -620,13 +475,14 @@
treeLatchesAcquired++;
try {
- int rightPageId = getFreePage(ctx.metaFrame);
- ICachedPage rightNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rightPageId), true);
+ int rightPageId = freePageManager.getFreePage(ctx.metaFrame);
+ ICachedPage rightNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rightPageId),
+ true);
pins++;
rightNode.acquireWriteLatch();
writeLatchesAcquired++;
try {
- IBTreeLeafFrame rightFrame = leafFrameFactory.getFrame();
+ IBTreeLeafFrame rightFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
rightFrame.setPage(rightNode);
rightFrame.initBuffer((byte) 0);
rightFrame.setPageTupleFieldCount(cmp.getFieldCount());
@@ -704,17 +560,19 @@
throws Exception {
ctx.interiorFrame.setPage(node);
ctx.interiorFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
- SpaceStatus spaceStatus = ctx.interiorFrame.hasSpaceInsert(tuple, cmp);
+
+ int targetTupleIndex = ctx.interiorFrame.findTupleIndex(tuple, cmp);
+ FrameOpSpaceStatus spaceStatus = ctx.interiorFrame.hasSpaceInsert(tuple, cmp);
switch (spaceStatus) {
case INSUFFICIENT_SPACE: {
splitsByLevel[ctx.interiorFrame.getLevel()]++; // debug
- int rightPageId = getFreePage(ctx.metaFrame);
+ int rightPageId = freePageManager.getFreePage(ctx.metaFrame);
ICachedPage rightNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rightPageId), true);
pins++;
rightNode.acquireWriteLatch();
writeLatchesAcquired++;
try {
- IBTreeFrame rightFrame = interiorFrameFactory.getFrame();
+ ITreeIndexFrame rightFrame = interiorFrameFactory.createFrame();
rightFrame.setPage(rightNode);
rightFrame.initBuffer((byte) ctx.interiorFrame.getLevel());
rightFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
@@ -753,14 +611,16 @@
case SUFFICIENT_CONTIGUOUS_SPACE: {
// System.out.println("INSERT INTERIOR: " + pageId);
- ctx.interiorFrame.insert(tuple, cmp);
+ ctx.interiorFrame.insert(tuple, cmp, targetTupleIndex);
ctx.splitKey.reset();
}
break;
case SUFFICIENT_SPACE: {
- ctx.interiorFrame.compact(cmp);
- ctx.interiorFrame.insert(tuple, cmp);
+ boolean slotsChanged = ctx.interiorFrame.compact(cmp);
+ if (slotsChanged)
+ targetTupleIndex = ctx.interiorFrame.findTupleIndex(tuple, cmp);
+ ctx.interiorFrame.insert(tuple, cmp, targetTupleIndex);
ctx.splitKey.reset();
}
break;
@@ -768,7 +628,9 @@
}
}
- public void delete(ITupleReference tuple, BTreeOpContext ctx) throws Exception {
+ @Override
+ public void delete(ITupleReference tuple, IndexOpContext ictx) throws Exception {
+ BTreeOpContext ctx = (BTreeOpContext) ictx;
ctx.reset();
ctx.pred.setLowKeyComparator(cmp);
ctx.pred.setHighKeyComparator(cmp);
@@ -822,7 +684,7 @@
// will this leaf become empty?
if (ctx.leafFrame.getTupleCount() == 1) {
- IBTreeLeafFrame siblingFrame = leafFrameFactory.getFrame();
+ IBTreeLeafFrame siblingFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
ICachedPage leftNode = null;
ICachedPage rightNode = null;
@@ -855,11 +717,9 @@
throw e;
}
- ctx.leafFrame.setPageLsn(ctx.leafFrame.getPageLsn() + 1); // TODO:
- // tie
- // together
- // with
- // logging
+ // TODO: tie together with logging
+ ctx.leafFrame.setPageLsn(ctx.leafFrame.getPageLsn() + 1);
+ ctx.leafFrame.setLevel(freePageManager.getFreePageLevelIndicator());
ctx.smPages.add(pageId);
ctx.leafFrame.setSmFlag(true);
@@ -937,6 +797,7 @@
// together
// with
// logging
+ ctx.leafFrame.setLevel(freePageManager.getFreePageLevelIndicator());
ctx.smPages.add(pageId);
ctx.interiorFrame.setSmFlag(true);
ctx.interiorFrame.setRightmostChildPageId(-1); // this node is
@@ -955,8 +816,8 @@
}
}
- private final void acquireLatch(ICachedPage node, BTreeOp op, boolean isLeaf) {
- if (isLeaf && (op.equals(BTreeOp.BTO_INSERT) || op.equals(BTreeOp.BTO_DELETE))) {
+ private final void acquireLatch(ICachedPage node, IndexOp op, boolean isLeaf) {
+ if (isLeaf && (op.equals(IndexOp.INSERT) || op.equals(IndexOp.DELETE))) {
node.acquireWriteLatch();
writeLatchesAcquired++;
} else {
@@ -965,8 +826,8 @@
}
}
- private final void releaseLatch(ICachedPage node, BTreeOp op, boolean isLeaf) {
- if (isLeaf && (op.equals(BTreeOp.BTO_INSERT) || op.equals(BTreeOp.BTO_DELETE))) {
+ private final void releaseLatch(ICachedPage node, IndexOp op, boolean isLeaf) {
+ if (isLeaf && (op.equals(IndexOp.INSERT) || op.equals(IndexOp.DELETE))) {
node.releaseWriteLatch();
writeLatchesReleased++;
} else {
@@ -998,9 +859,13 @@
pins++;
ctx.interiorFrame.setPage(node);
- boolean isLeaf = ctx.interiorFrame.isLeaf();
- acquireLatch(node, ctx.op, isLeaf);
+ // this check performs an unprotected read in the page
+ // the following could happen: TODO fill out
+ boolean unsafeIsLeaf = ctx.interiorFrame.isLeaf();
+ acquireLatch(node, ctx.op, unsafeIsLeaf);
boolean smFlag = ctx.interiorFrame.getSmFlag();
+ // re-check leafness after latching
+ boolean isLeaf = ctx.interiorFrame.isLeaf();
// remember trail of pageLsns, to unwind recursion in case of an ongoing
// structure modification
@@ -1053,7 +918,7 @@
switch (ctx.op) {
- case BTO_INSERT: {
+ case INSERT: {
if (ctx.splitKey.getBuffer() != null) {
node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
pins++;
@@ -1073,7 +938,7 @@
}
break;
- case BTO_DELETE: {
+ case DELETE: {
if (ctx.splitKey.getBuffer() != null) {
node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
pins++;
@@ -1093,7 +958,7 @@
}
break;
- case BTO_SEARCH: {
+ case SEARCH: {
// do nothing
}
break;
@@ -1107,7 +972,7 @@
ctx.opRestarts++;
System.out.println("ONGOING SM ON PAGE " + pageId + " AT LEVEL " + ctx.interiorFrame.getLevel()
+ ", RESTARTS: " + ctx.opRestarts);
- releaseLatch(node, ctx.op, isLeaf);
+ releaseLatch(node, ctx.op, unsafeIsLeaf);
bufferCache.unpin(node);
unpins++;
@@ -1128,28 +993,29 @@
}
} else { // isLeaf and !smFlag
switch (ctx.op) {
- case BTO_INSERT: {
+ case INSERT: {
insertLeaf(node, pageId, ctx.pred.getLowKey(), ctx);
}
break;
- case BTO_DELETE: {
+ case DELETE: {
deleteLeaf(node, pageId, ctx.pred.getLowKey(), ctx);
}
break;
- case BTO_SEARCH: {
- ctx.cursor.open(node, ctx.pred);
+ case SEARCH: {
+ ctx.cursorInitialState.setPage(node);
+ ctx.cursor.open(ctx.cursorInitialState, ctx.pred);
}
break;
}
}
- } catch (BTreeException e) {
+ } catch (TreeIndexException e) {
// System.out.println("BTREE EXCEPTION");
// System.out.println(e.getMessage());
// e.printStackTrace();
if (!e.getHandled()) {
- releaseLatch(node, ctx.op, isLeaf);
+ releaseLatch(node, ctx.op, unsafeIsLeaf);
bufferCache.unpin(node);
unpins++;
e.setHandled(true);
@@ -1159,7 +1025,7 @@
// failure to pin a new node during a split
System.out.println("ASTERIX EXCEPTION");
e.printStackTrace();
- releaseLatch(node, ctx.op, isLeaf);
+ releaseLatch(node, ctx.op, unsafeIsLeaf);
bufferCache.unpin(node);
unpins++;
BTreeException propException = new BTreeException(e);
@@ -1173,28 +1039,29 @@
private boolean bulkNewPage = false;
- public final class BulkLoadContext {
+ public final class BulkLoadContext implements IIndexBulkLoadContext {
public final int slotSize;
public final int leafMaxBytes;
public final int interiorMaxBytes;
- public final SplitKey splitKey;
+ public final BTreeSplitKey splitKey;
// we maintain a frontier of nodes for each level
private final ArrayList<NodeFrontier> nodeFrontiers = new ArrayList<NodeFrontier>();
private final IBTreeLeafFrame leafFrame;
private final IBTreeInteriorFrame interiorFrame;
- private final IBTreeMetaDataFrame metaFrame;
+ private final ITreeIndexMetaDataFrame metaFrame;
- private final IBTreeTupleWriter tupleWriter;
+ private final ITreeIndexTupleWriter tupleWriter;
public BulkLoadContext(float fillFactor, IBTreeLeafFrame leafFrame, IBTreeInteriorFrame interiorFrame,
- IBTreeMetaDataFrame metaFrame) throws HyracksDataException {
+ ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
- splitKey = new SplitKey(leafFrame.getTupleWriter().createTupleReference());
+ splitKey = new BTreeSplitKey(leafFrame.getTupleWriter().createTupleReference());
tupleWriter = leafFrame.getTupleWriter();
NodeFrontier leafFrontier = new NodeFrontier(leafFrame.createTupleReference());
- leafFrontier.pageId = getFreePage(metaFrame);
- leafFrontier.page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, leafFrontier.pageId), bulkNewPage);
+ leafFrontier.pageId = freePageManager.getFreePage(metaFrame);
+ leafFrontier.page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, leafFrontier.pageId),
+ bulkNewPage);
leafFrontier.page.acquireWriteLatch();
interiorFrame.setPage(leafFrontier.page);
@@ -1216,7 +1083,7 @@
private void addLevel() throws HyracksDataException {
NodeFrontier frontier = new NodeFrontier(tupleWriter.createTupleReference());
- frontier.pageId = getFreePage(metaFrame);
+ frontier.pageId = freePageManager.getFreePage(metaFrame);
frontier.page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, frontier.pageId), bulkNewPage);
frontier.page.acquireWriteLatch();
frontier.lastTuple.setFieldCount(cmp.getKeyFieldCount());
@@ -1242,23 +1109,23 @@
int spaceUsed = ctx.interiorFrame.getBuffer().capacity() - ctx.interiorFrame.getTotalFreeSpace();
if (spaceUsed + spaceNeeded > ctx.interiorMaxBytes) {
- SplitKey copyKey = ctx.splitKey.duplicate(ctx.leafFrame.getTupleWriter().createTupleReference());
+ BTreeSplitKey copyKey = ctx.splitKey.duplicate(ctx.leafFrame.getTupleWriter().createTupleReference());
tuple = copyKey.getTuple();
- frontier.lastTuple.resetByOffset(frontier.page.getBuffer(), ctx.interiorFrame
- .getTupleOffset(ctx.interiorFrame.getTupleCount() - 1));
+ frontier.lastTuple.resetByTupleOffset(frontier.page.getBuffer(),
+ ctx.interiorFrame.getTupleOffset(ctx.interiorFrame.getTupleCount() - 1));
int splitKeySize = ctx.tupleWriter.bytesRequired(frontier.lastTuple, 0, cmp.getKeyFieldCount());
ctx.splitKey.initData(splitKeySize);
ctx.tupleWriter
.writeTupleFields(frontier.lastTuple, 0, cmp.getKeyFieldCount(), ctx.splitKey.getBuffer(), 0);
- ctx.splitKey.getTuple().resetByOffset(ctx.splitKey.getBuffer(), 0);
+ ctx.splitKey.getTuple().resetByTupleOffset(ctx.splitKey.getBuffer(), 0);
ctx.splitKey.setLeftPage(frontier.pageId);
ctx.interiorFrame.deleteGreatest(cmp);
frontier.page.releaseWriteLatch();
bufferCache.unpin(frontier.page);
- frontier.pageId = getFreePage(ctx.metaFrame);
+ frontier.pageId = freePageManager.getFreePage(ctx.metaFrame);
ctx.splitKey.setRightPage(frontier.pageId);
propagateBulk(ctx, level + 1);
@@ -1279,19 +1146,23 @@
}
// assumes btree has been created and opened
- public BulkLoadContext beginBulkLoad(float fillFactor, IBTreeLeafFrame leafFrame,
- IBTreeInteriorFrame interiorFrame, IBTreeMetaDataFrame metaFrame) throws HyracksDataException {
+ @Override
+ public IIndexBulkLoadContext beginBulkLoad(float fillFactor, ITreeIndexFrame leafFrame,
+ ITreeIndexFrame interiorFrame, ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
if (loaded)
throw new HyracksDataException("Trying to bulk-load BTree but has BTree already been loaded.");
- BulkLoadContext ctx = new BulkLoadContext(fillFactor, leafFrame, interiorFrame, metaFrame);
+ BulkLoadContext ctx = new BulkLoadContext(fillFactor, (IBTreeLeafFrame) leafFrame,
+ (IBTreeInteriorFrame) interiorFrame, metaFrame);
ctx.nodeFrontiers.get(0).lastTuple.setFieldCount(cmp.getFieldCount());
ctx.splitKey.getTuple().setFieldCount(cmp.getKeyFieldCount());
return ctx;
}
- public void bulkLoadAddTuple(BulkLoadContext ctx, ITupleReference tuple) throws HyracksDataException {
+ @Override
+ public void bulkLoadAddTuple(IIndexBulkLoadContext ictx, ITupleReference tuple) throws HyracksDataException {
+ BulkLoadContext ctx = (BulkLoadContext) ictx;
NodeFrontier leafFrontier = ctx.nodeFrontiers.get(0);
IBTreeLeafFrame leafFrame = ctx.leafFrame;
@@ -1308,12 +1179,12 @@
leafFrontier.lastTuple.resetByTupleIndex(leafFrame, leafFrame.getTupleCount() - 1);
int splitKeySize = ctx.tupleWriter.bytesRequired(leafFrontier.lastTuple, 0, cmp.getKeyFieldCount());
ctx.splitKey.initData(splitKeySize);
- ctx.tupleWriter.writeTupleFields(leafFrontier.lastTuple, 0, cmp.getKeyFieldCount(), ctx.splitKey
- .getBuffer(), 0);
- ctx.splitKey.getTuple().resetByOffset(ctx.splitKey.getBuffer(), 0);
+ ctx.tupleWriter.writeTupleFields(leafFrontier.lastTuple, 0, cmp.getKeyFieldCount(),
+ ctx.splitKey.getBuffer(), 0);
+ ctx.splitKey.getTuple().resetByTupleOffset(ctx.splitKey.getBuffer(), 0);
ctx.splitKey.setLeftPage(leafFrontier.pageId);
int prevPageId = leafFrontier.pageId;
- leafFrontier.pageId = getFreePage(ctx.metaFrame);
+ leafFrontier.pageId = freePageManager.getFreePage(ctx.metaFrame);
leafFrame.setNextLeaf(leafFrontier.pageId);
leafFrontier.page.releaseWriteLatch();
@@ -1322,7 +1193,8 @@
ctx.splitKey.setRightPage(leafFrontier.pageId);
propagateBulk(ctx, 1);
- leafFrontier.page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, leafFrontier.pageId), bulkNewPage);
+ leafFrontier.page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, leafFrontier.pageId),
+ bulkNewPage);
leafFrontier.page.acquireWriteLatch();
leafFrame.setPage(leafFrontier.page);
leafFrame.initBuffer((byte) 0);
@@ -1340,18 +1212,29 @@
// System.out.println(s);
}
- public void endBulkLoad(BulkLoadContext ctx) throws HyracksDataException {
+ @Override
+ public void endBulkLoad(IIndexBulkLoadContext ictx) throws HyracksDataException {
// copy root
+ BulkLoadContext ctx = (BulkLoadContext) ictx;
ICachedPage rootNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rootPage), bulkNewPage);
rootNode.acquireWriteLatch();
+ NodeFrontier lastNodeFrontier = ctx.nodeFrontiers.get(ctx.nodeFrontiers.size() - 1);
+ IBTreeInteriorFrame interiorFrame = ctx.interiorFrame;
try {
- ICachedPage toBeRoot = ctx.nodeFrontiers.get(ctx.nodeFrontiers.size() - 1).page;
+ ICachedPage toBeRoot = lastNodeFrontier.page;
System.arraycopy(toBeRoot.getBuffer().array(), 0, rootNode.getBuffer().array(), 0, toBeRoot.getBuffer()
.capacity());
} finally {
rootNode.releaseWriteLatch();
bufferCache.unpin(rootNode);
+ // register old root as free page
+ freePageManager.addFreePage(ctx.metaFrame, lastNodeFrontier.pageId);
+
+ // re-initialize the old root page with the free-page level indicator
+ interiorFrame.setPage(lastNodeFrontier.page);
+ interiorFrame.initBuffer(freePageManager.getFreePageLevelIndicator());
+
// cleanup
for (int i = 0; i < ctx.nodeFrontiers.size(); i++) {
ctx.nodeFrontiers.get(i).page.releaseWriteLatch();
@@ -1364,21 +1247,44 @@
loaded = true;
}
- public BTreeOpContext createOpContext(BTreeOp op, IBTreeLeafFrame leafFrame, IBTreeInteriorFrame interiorFrame,
- IBTreeMetaDataFrame metaFrame) {
- // TODO: figure out better tree-height hint
- return new BTreeOpContext(op, leafFrame, interiorFrame, metaFrame, 6);
+ @Override
+ public BTreeOpContext createOpContext(IndexOp op, ITreeIndexFrame leafFrame, ITreeIndexFrame interiorFrame,
+ ITreeIndexMetaDataFrame metaFrame) {
+ return new BTreeOpContext(op, (IBTreeLeafFrame) leafFrame, (IBTreeInteriorFrame) interiorFrame, metaFrame, 6);
}
- public IBTreeInteriorFrameFactory getInteriorFrameFactory() {
+ public ITreeIndexFrameFactory getInteriorFrameFactory() {
return interiorFrameFactory;
}
- public IBTreeLeafFrameFactory getLeafFrameFactory() {
+ public ITreeIndexFrameFactory getLeafFrameFactory() {
return leafFrameFactory;
}
public MultiComparator getMultiComparator() {
return cmp;
}
+
+ public IFreePageManager getFreePageManager() {
+ return freePageManager;
+ }
+
+ public int getRootPageId() {
+ return rootPage;
+ }
+
+ @Override
+ public void update(ITupleReference tuple, IndexOpContext ictx) throws Exception {
+ throw new Exception("BTree Update not implemented.");
+ }
+
+ @Override
+ public int getFieldCount() {
+ return cmp.getFieldCount();
+ }
+
+ @Override
+ public IndexType getIndexType() {
+ return IndexType.BTREE;
+ }
}
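With BTree implementing ITreeIndex, all page bookkeeping now goes through the injected IFreePageManager and every operation runs against an op context. A hedged end-to-end sketch using only constructors and signatures that appear in this diff; bufferCache, freePageManager, the frame factories, the frames, cmp, fileId and tuple are assumed to be set up elsewhere:

    // Sketch only: all inputs are assumed; signatures are taken from the hunks above.
    BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
    btree.create(fileId, leafFrame, metaFrame);

    // point insert, routed through insertLeaf/insertInterior above
    BTreeOpContext insertCtx = btree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
    btree.insert(tuple, insertCtx);

    // bulk load (tuples are assumed to arrive in key order)
    IIndexBulkLoadContext loadCtx = btree.beginBulkLoad(BTree.DEFAULT_FILL_FACTOR, leafFrame, interiorFrame, metaFrame);
    btree.bulkLoadAddTuple(loadCtx, tuple);
    btree.endBulkLoad(loadCtx);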
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeCursorInitialState.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeCursorInitialState.java
new file mode 100644
index 0000000..855f9e6
--- /dev/null
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeCursorInitialState.java
@@ -0,0 +1,21 @@
+package edu.uci.ics.hyracks.storage.am.btree.impls;
+
+import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+
+public class BTreeCursorInitialState implements ICursorInitialState {
+
+ private ICachedPage page;
+
+ public BTreeCursorInitialState(ICachedPage page) {
+ this.page = page;
+ }
+
+ public ICachedPage getPage() {
+ return page;
+ }
+
+ public void setPage(ICachedPage page) {
+ this.page = page;
+ }
+}
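BTreeCursorInitialState is the new handoff object between the tree and its cursors: search and diskOrderScan set the pinned page on it and pass it to ITreeIndexCursor.open instead of handing over the ICachedPage directly. A hedged caller-side sketch; the constructors and the search signature come from this diff, while lowKey/highKey, the frames, and the hasNext/next/close cursor calls are assumed and not shown here:

    // Sketch only: 'btree', 'cmp', the frames and the key tuples are assumed to exist.
    ITreeIndexCursor cursor = new BTreeRangeSearchCursor((IBTreeLeafFrame) leafFrameFactory.createFrame());
    RangePredicate pred = new RangePredicate(true, lowKey, highKey, true, true, cmp, cmp);
    BTreeOpContext searchCtx = btree.createOpContext(IndexOp.SEARCH, leafFrame, interiorFrame, metaFrame);
    btree.search(cursor, pred, searchCtx);  // opens the cursor through BTreeCursorInitialState
    while (cursor.hasNext()) {              // assumed cursor API, not part of this diff
        cursor.next();
        // consume cursor.getTuple() here (assumed accessor)
    }
    cursor.close();                         // assumed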
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeException.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeException.java
index 7ec61ed..8019bcb 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeException.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeException.java
@@ -15,7 +15,9 @@
package edu.uci.ics.hyracks.storage.am.btree.impls;
-public class BTreeException extends Exception {
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+
+public class BTreeException extends TreeIndexException {
private static final long serialVersionUID = 1L;
private boolean handled = false;
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOpContext.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOpContext.java
index 84c11a2..6d65b14 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOpContext.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOpContext.java
@@ -15,41 +15,46 @@
package edu.uci.ics.hyracks.storage.am.btree.impls;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeCursor;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOpContext;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IntArrayList;
-public final class BTreeOpContext {
- public final BTreeOp op;
+public final class BTreeOpContext implements IndexOpContext {
+ public final IndexOp op;
public final IBTreeLeafFrame leafFrame;
public final IBTreeInteriorFrame interiorFrame;
- public final IBTreeMetaDataFrame metaFrame;
- public IBTreeCursor cursor;
+ public final ITreeIndexMetaDataFrame metaFrame;
+ public ITreeIndexCursor cursor;
+ public BTreeCursorInitialState cursorInitialState;
public RangePredicate pred;
- public final SplitKey splitKey;
+ public final BTreeSplitKey splitKey;
public int opRestarts = 0;
public final IntArrayList pageLsns; // used like a stack
public final IntArrayList smPages;
public final IntArrayList freePages;
- public BTreeOpContext(BTreeOp op, IBTreeLeafFrame leafFrame, IBTreeInteriorFrame interiorFrame,
- IBTreeMetaDataFrame metaFrame, int treeHeightHint) {
+ public BTreeOpContext(IndexOp op, IBTreeLeafFrame leafFrame, IBTreeInteriorFrame interiorFrame,
+ ITreeIndexMetaDataFrame metaFrame, int treeHeightHint) {
this.op = op;
this.leafFrame = leafFrame;
this.interiorFrame = interiorFrame;
this.metaFrame = metaFrame;
pageLsns = new IntArrayList(treeHeightHint, treeHeightHint);
- if (op != BTreeOp.BTO_SEARCH) {
+ if (op != IndexOp.SEARCH && op != IndexOp.DISKORDERSCAN) {
smPages = new IntArrayList(treeHeightHint, treeHeightHint);
freePages = new IntArrayList(treeHeightHint, treeHeightHint);
pred = new RangePredicate(true, null, null, true, true, null, null);
- splitKey = new SplitKey(leafFrame.getTupleWriter().createTupleReference());
+ splitKey = new BTreeSplitKey(leafFrame.getTupleWriter().createTupleReference());
} else {
smPages = null;
freePages = null;
splitKey = null;
+ cursorInitialState = new BTreeCursorInitialState(null);
}
}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/RangeSearchCursor.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeRangeSearchCursor.java
similarity index 87%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/RangeSearchCursor.java
rename to hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeRangeSearchCursor.java
index 2ef1905..4c7503d 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/RangeSearchCursor.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeRangeSearchCursor.java
@@ -17,15 +17,19 @@
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeCursor;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.ISearchPredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
+import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
-public class RangeSearchCursor implements IBTreeCursor {
+public class BTreeRangeSearchCursor implements ITreeIndexCursor {
private int fileId = -1;
private ICachedPage page = null;
@@ -42,7 +46,7 @@
private FindTupleNoExactMatchPolicy lowKeyFtp;
private FindTupleNoExactMatchPolicy highKeyFtp;
- private IBTreeTupleReference frameTuple;
+ private ITreeIndexTupleReference frameTuple;
private RangePredicate pred;
private MultiComparator lowKeyCmp;
@@ -50,7 +54,7 @@
private ITupleReference lowKey;
private ITupleReference highKey;
- public RangeSearchCursor(IBTreeLeafFrame frame) {
+ public BTreeRangeSearchCursor(IBTreeLeafFrame frame) {
this.frame = frame;
this.frameTuple = frame.createTupleReference();
}
@@ -83,7 +87,7 @@
bufferCache.unpin(page);
page = nextLeaf;
- frame.setPage(page);
+ frame.setPage(page);
}
@Override
@@ -170,14 +174,14 @@
}
@Override
- public void open(ICachedPage page, ISearchPredicate searchPred) throws Exception {
+ public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws Exception {
// in case open is called multiple times without closing
- if (this.page != null) {
- this.page.releaseReadLatch();
- bufferCache.unpin(this.page);
+ if (page != null) {
+ page.releaseReadLatch();
+ bufferCache.unpin(page);
}
- this.page = page;
+ page = ((BTreeCursorInitialState) initialState).getPage();
frame.setPage(page);
pred = (RangePredicate) searchPred;
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/SplitKey.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeSplitKey.java
similarity index 79%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/SplitKey.java
rename to hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeSplitKey.java
index cf2fd74..e664e5b 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/SplitKey.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeSplitKey.java
@@ -17,18 +17,19 @@
import java.nio.ByteBuffer;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-public class SplitKey {
+public class BTreeSplitKey implements ISplitKey {
public byte[] data = null;
public ByteBuffer buf = null;
- public IBTreeTupleReference tuple;
+ public ITreeIndexTupleReference tuple;
public int keySize = 0;
- public SplitKey(IBTreeTupleReference tuple) {
+ public BTreeSplitKey(ITreeIndexTupleReference tuple) {
this.tuple = tuple;
}
-
+
public void initData(int keySize) {
// try to reuse existing memory from a lower-level split if possible
this.keySize = keySize;
@@ -42,7 +43,7 @@
buf = ByteBuffer.wrap(data);
}
- tuple.resetByOffset(buf, 0);
+ tuple.resetByTupleOffset(buf, 0);
}
public void reset() {
@@ -54,7 +55,7 @@
return buf;
}
- public IBTreeTupleReference getTuple() {
+ public ITreeIndexTupleReference getTuple() {
return tuple;
}
@@ -79,12 +80,12 @@
buf.putInt(keySize + 4, rightPage);
}
- public SplitKey duplicate(IBTreeTupleReference copyTuple) {
- SplitKey copy = new SplitKey(copyTuple);
+ public BTreeSplitKey duplicate(ITreeIndexTupleReference copyTuple) {
+ BTreeSplitKey copy = new BTreeSplitKey(copyTuple);
copy.data = data.clone();
copy.buf = ByteBuffer.wrap(copy.data);
copy.tuple.setFieldCount(tuple.getFieldCount());
- copy.tuple.resetByOffset(copy.buf, 0);
+ copy.tuple.resetByTupleOffset(copy.buf, 0);
return copy;
}
}
\ No newline at end of file
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixPrefixTupleReference.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixPrefixTupleReference.java
index 8711a17..621281a 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixPrefixTupleReference.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixPrefixTupleReference.java
@@ -16,9 +16,9 @@
package edu.uci.ics.hyracks.storage.am.btree.impls;
import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeFrame;
-import edu.uci.ics.hyracks.storage.am.btree.frames.FieldPrefixNSMLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.tuples.TypeAwareTupleReference;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleReference;
public class FieldPrefixPrefixTupleReference extends TypeAwareTupleReference {
@@ -28,13 +28,13 @@
// assumes tuple index refers to prefix tuples
@Override
- public void resetByTupleIndex(IBTreeFrame frame, int tupleIndex) {
- FieldPrefixNSMLeafFrame concreteFrame = (FieldPrefixNSMLeafFrame) frame;
+ public void resetByTupleIndex(ITreeIndexFrame frame, int tupleIndex) {
+ BTreeFieldPrefixNSMLeafFrame concreteFrame = (BTreeFieldPrefixNSMLeafFrame) frame;
int prefixSlotOff = concreteFrame.slotManager.getPrefixSlotOff(tupleIndex);
int prefixSlot = concreteFrame.getBuffer().getInt(prefixSlotOff);
setFieldCount(concreteFrame.slotManager.decodeFirstSlotField(prefixSlot));
tupleStartOff = concreteFrame.slotManager.decodeSecondSlotField(prefixSlot);
buf = concreteFrame.getBuffer();
- resetByOffset(buf, tupleStartOff);
+ resetByTupleOffset(buf, tupleStartOff);
}
}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixSlotManager.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixSlotManager.java
index 83a2d92..6f98c8c 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixSlotManager.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixSlotManager.java
@@ -18,9 +18,12 @@
import java.nio.ByteBuffer;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleReference;
import edu.uci.ics.hyracks.storage.am.btree.api.IPrefixSlotManager;
-import edu.uci.ics.hyracks.storage.am.btree.frames.FieldPrefixNSMLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
public class FieldPrefixSlotManager implements IPrefixSlotManager {
@@ -30,7 +33,7 @@
public static final int GREATEST_SLOT = 0x00FFFFFF;
private ByteBuffer buf;
- private FieldPrefixNSMLeafFrame frame;
+ private BTreeFieldPrefixNSMLeafFrame frame;
public int decodeFirstSlotField(int slot) {
return (slot & 0xFF000000) >>> 24;
@@ -45,7 +48,7 @@
}
// returns prefix slot number, or TUPLE_UNCOMPRESSED if no match was found
- public int findPrefix(ITupleReference tuple, IBTreeTupleReference framePrefixTuple, MultiComparator multiCmp) {
+ public int findPrefix(ITupleReference tuple, ITreeIndexTupleReference framePrefixTuple, MultiComparator multiCmp) {
int prefixMid;
int prefixBegin = 0;
int prefixEnd = frame.getPrefixTupleCount() - 1;
@@ -68,8 +71,8 @@
}
@Override
- public int findSlot(ITupleReference searchKey, IBTreeTupleReference frameTuple,
- IBTreeTupleReference framePrefixTuple, MultiComparator multiCmp, FindTupleMode mode,
+ public int findSlot(ITupleReference searchKey, ITreeIndexTupleReference frameTuple,
+ ITreeIndexTupleReference framePrefixTuple, MultiComparator multiCmp, FindTupleMode mode,
FindTupleNoExactMatchPolicy matchPolicy) {
if (frame.getTupleCount() <= 0)
encodeSlotFields(TUPLE_UNCOMPRESSED, GREATEST_SLOT);
@@ -232,7 +235,7 @@
}
}
- public void setFrame(FieldPrefixNSMLeafFrame frame) {
+ public void setFrame(BTreeFieldPrefixNSMLeafFrame frame) {
this.frame = frame;
this.buf = frame.getBuffer();
}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixTupleReference.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixTupleReference.java
index c131e3f..9a655fc 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixTupleReference.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixTupleReference.java
@@ -2,26 +2,26 @@
import java.nio.ByteBuffer;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.frames.FieldPrefixNSMLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-public class FieldPrefixTupleReference implements IBTreeTupleReference {
+public class FieldPrefixTupleReference implements ITreeIndexTupleReference {
- private FieldPrefixNSMLeafFrame frame;
+ private BTreeFieldPrefixNSMLeafFrame frame;
private int prefixTupleStartOff;
private int suffixTupleStartOff;
private int numPrefixFields;
private int fieldCount;
- private IBTreeTupleReference helperTuple;
+ private ITreeIndexTupleReference helperTuple;
- public FieldPrefixTupleReference(IBTreeTupleReference helperTuple) {
+ public FieldPrefixTupleReference(ITreeIndexTupleReference helperTuple) {
this.helperTuple = helperTuple;
}
@Override
- public void resetByTupleIndex(IBTreeFrame frame, int tupleIndex) {
- this.frame = (FieldPrefixNSMLeafFrame) frame;
+ public void resetByTupleIndex(ITreeIndexFrame frame, int tupleIndex) {
+ this.frame = (BTreeFieldPrefixNSMLeafFrame) frame;
int tupleSlotOff = this.frame.slotManager.getTupleSlotOff(tupleIndex);
int tupleSlot = this.frame.getBuffer().getInt(tupleSlotOff);
@@ -63,11 +63,11 @@
public int getFieldLength(int fIdx) {
if (fIdx < numPrefixFields) {
helperTuple.setFieldCount(numPrefixFields);
- helperTuple.resetByOffset(frame.getBuffer(), prefixTupleStartOff);
+ helperTuple.resetByTupleOffset(frame.getBuffer(), prefixTupleStartOff);
return helperTuple.getFieldLength(fIdx);
} else {
helperTuple.setFieldCount(numPrefixFields, fieldCount - numPrefixFields);
- helperTuple.resetByOffset(frame.getBuffer(), suffixTupleStartOff);
+ helperTuple.resetByTupleOffset(frame.getBuffer(), suffixTupleStartOff);
return helperTuple.getFieldLength(fIdx - numPrefixFields);
}
}
@@ -76,18 +76,18 @@
public int getFieldStart(int fIdx) {
if (fIdx < numPrefixFields) {
helperTuple.setFieldCount(numPrefixFields);
- helperTuple.resetByOffset(frame.getBuffer(), prefixTupleStartOff);
+ helperTuple.resetByTupleOffset(frame.getBuffer(), prefixTupleStartOff);
return helperTuple.getFieldStart(fIdx);
} else {
helperTuple.setFieldCount(numPrefixFields, fieldCount - numPrefixFields);
- helperTuple.resetByOffset(frame.getBuffer(), suffixTupleStartOff);
+ helperTuple.resetByTupleOffset(frame.getBuffer(), suffixTupleStartOff);
return helperTuple.getFieldStart(fIdx - numPrefixFields);
}
}
// unsupported operation
@Override
- public void resetByOffset(ByteBuffer buf, int tupleStartOffset) {
+ public void resetByTupleOffset(ByteBuffer buf, int tupleStartOffset) {
frame = null;
}
}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/NodeFrontier.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/NodeFrontier.java
index 83baa92..60f682c 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/NodeFrontier.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/NodeFrontier.java
@@ -15,15 +15,15 @@
package edu.uci.ics.hyracks.storage.am.btree.impls;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
public class NodeFrontier {
public ICachedPage page;
public int pageId;
- public IBTreeTupleReference lastTuple;
+ public ITreeIndexTupleReference lastTuple;
- public NodeFrontier(IBTreeTupleReference lastTuple) {
+ public NodeFrontier(ITreeIndexTupleReference lastTuple) {
this.lastTuple = lastTuple;
}
}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/RangePredicate.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/RangePredicate.java
index 697b9ab..699f6a6 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/RangePredicate.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/RangePredicate.java
@@ -16,7 +16,8 @@
package edu.uci.ics.hyracks.storage.am.btree.impls;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.ISearchPredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
public class RangePredicate implements ISearchPredicate {
diff --git a/hyracks-storage-am-invertedindex/.classpath b/hyracks-storage-am-common/.classpath
similarity index 100%
rename from hyracks-storage-am-invertedindex/.classpath
rename to hyracks-storage-am-common/.classpath
diff --git a/hyracks-storage-am-invertedindex/.project b/hyracks-storage-am-common/.project
similarity index 91%
copy from hyracks-storage-am-invertedindex/.project
copy to hyracks-storage-am-common/.project
index 9303fb7..ec47f6b 100644
--- a/hyracks-storage-am-invertedindex/.project
+++ b/hyracks-storage-am-common/.project
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
- <name>hyracks-storage-am-invertedindex</name>
+ <name>hyracks-storage-am-common</name>
<comment></comment>
<projects>
</projects>
diff --git a/hyracks-storage-am-common/.settings/org.eclipse.jdt.core.prefs b/hyracks-storage-am-common/.settings/org.eclipse.jdt.core.prefs
new file mode 100644
index 0000000..b37b3bb
--- /dev/null
+++ b/hyracks-storage-am-common/.settings/org.eclipse.jdt.core.prefs
@@ -0,0 +1,6 @@
+#Thu Jul 07 12:23:56 PDT 2011
+eclipse.preferences.version=1
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
+org.eclipse.jdt.core.compiler.compliance=1.6
+org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
+org.eclipse.jdt.core.compiler.source=1.6
diff --git a/hyracks-storage-am-common/.settings/org.maven.ide.eclipse.prefs b/hyracks-storage-am-common/.settings/org.maven.ide.eclipse.prefs
new file mode 100644
index 0000000..7b5e618
--- /dev/null
+++ b/hyracks-storage-am-common/.settings/org.maven.ide.eclipse.prefs
@@ -0,0 +1,8 @@
+#Thu Jul 07 12:23:53 PDT 2011
+activeProfiles=
+eclipse.preferences.version=1
+fullBuildGoals=process-test-resources
+resolveWorkspaceProjects=true
+resourceFilterGoals=process-resources resources\:testResources
+skipCompilerPlugin=true
+version=1
diff --git a/hyracks-storage-am-common/pom.xml b/hyracks-storage-am-common/pom.xml
new file mode 100644
index 0000000..0d20126
--- /dev/null
+++ b/hyracks-storage-am-common/pom.xml
@@ -0,0 +1,56 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-storage-am-common</artifactId>
+ <version>0.1.7-SNAPSHOT</version>
+
+ <parent>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks</artifactId>
+ <version>0.1.7-SNAPSHOT</version>
+ </parent>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <version>2.0.2</version>
+ <configuration>
+ <source>1.6</source>
+ <target>1.6</target>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+ <dependencies>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-api</artifactId>
+ <version>0.1.7-SNAPSHOT</version>
+ <type>jar</type>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-storage-common</artifactId>
+ <version>0.1.7-SNAPSHOT</version>
+ <type>jar</type>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-dataflow-common</artifactId>
+ <version>0.1.7-SNAPSHOT</version>
+ <type>jar</type>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-dataflow-std</artifactId>
+ <version>0.1.7-SNAPSHOT</version>
+ <type>jar</type>
+ <scope>compile</scope>
+ </dependency>
+ </dependencies>
+</project>
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeMetaDataFrameFactory.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ICursorInitialState.java
similarity index 72%
copy from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeMetaDataFrameFactory.java
copy to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ICursorInitialState.java
index 2912cf9..5f7c88d 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeMetaDataFrameFactory.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ICursorInitialState.java
@@ -13,8 +13,12 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.api;
+package edu.uci.ics.hyracks.storage.am.common.api;
-public interface IBTreeMetaDataFrameFactory {
- public IBTreeMetaDataFrame getFrame();
-}
\ No newline at end of file
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+
+public interface ICursorInitialState {
+ public ICachedPage getPage();
+
+ public void setPage(ICachedPage page);
+}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IFreePageManager.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IFreePageManager.java
new file mode 100644
index 0000000..7abc0e3
--- /dev/null
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IFreePageManager.java
@@ -0,0 +1,25 @@
+package edu.uci.ics.hyracks.storage.am.common.api;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public interface IFreePageManager {
+ public int getFreePage(ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException;
+
+ public void addFreePage(ITreeIndexMetaDataFrame metaFrame, int freePage) throws HyracksDataException;
+
+ public int getMaxPage(ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException;
+
+ public void init(ITreeIndexMetaDataFrame metaFrame, int currentMaxPage) throws HyracksDataException;
+
+ public ITreeIndexMetaDataFrameFactory getMetaDataFrameFactory();
+
+ // the two level indicators below are required to be negative values
+ public byte getMetaPageLevelIndicator();
+
+ public byte getFreePageLevelIndicator();
+
+ // determined by examining the page's level indicator
+ public boolean isMetaPage(ITreeIndexMetaDataFrame metaFrame);
+
+ public boolean isFreePage(ITreeIndexMetaDataFrame metaFrame);
+}
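
For illustration, a free page manager satisfying this contract can be as small as the in-memory sketch below; the class name, the deque-based bookkeeping, and the getLevel() accessor assumed on ITreeIndexMetaDataFrame are illustrative assumptions rather than part of the patch.

    import java.util.ArrayDeque;

    import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
    import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
    import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
    import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;

    // Hypothetical sketch: keeps the free list in memory only and never touches an on-disk meta page chain.
    public class InMemoryFreePageManager implements IFreePageManager {
        private final ITreeIndexMetaDataFrameFactory metaDataFrameFactory;
        private final ArrayDeque<Integer> freePages = new ArrayDeque<Integer>();
        private int maxPage;

        public InMemoryFreePageManager(ITreeIndexMetaDataFrameFactory metaDataFrameFactory) {
            this.metaDataFrameFactory = metaDataFrameFactory;
        }

        @Override
        public int getFreePage(ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
            // reuse a previously freed page if one exists, otherwise grow the file
            Integer freePage = freePages.poll();
            return freePage != null ? freePage.intValue() : ++maxPage;
        }

        @Override
        public void addFreePage(ITreeIndexMetaDataFrame metaFrame, int freePage) throws HyracksDataException {
            freePages.add(freePage);
        }

        @Override
        public int getMaxPage(ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
            return maxPage;
        }

        @Override
        public void init(ITreeIndexMetaDataFrame metaFrame, int currentMaxPage) throws HyracksDataException {
            freePages.clear();
            maxPage = currentMaxPage;
        }

        @Override
        public ITreeIndexMetaDataFrameFactory getMetaDataFrameFactory() {
            return metaDataFrameFactory;
        }

        // both indicators are negative, as the interface requires
        @Override
        public byte getMetaPageLevelIndicator() {
            return (byte) -1;
        }

        @Override
        public byte getFreePageLevelIndicator() {
            return (byte) -2;
        }

        // assumes the meta-data frame exposes the page level via getLevel()
        @Override
        public boolean isMetaPage(ITreeIndexMetaDataFrame metaFrame) {
            return metaFrame.getLevel() == getMetaPageLevelIndicator();
        }

        @Override
        public boolean isFreePage(ITreeIndexMetaDataFrame metaFrame) {
            return metaFrame.getLevel() == getFreePageLevelIndicator();
        }
    }
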
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexBulkLoadContext.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexBulkLoadContext.java
new file mode 100644
index 0000000..a896d80
--- /dev/null
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IIndexBulkLoadContext.java
@@ -0,0 +1,4 @@
+package edu.uci.ics.hyracks.storage.am.common.api;
+
+public interface IIndexBulkLoadContext {
+}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/ISearchPredicate.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISearchPredicate.java
similarity index 76%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/ISearchPredicate.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISearchPredicate.java
index 255c370..f4836e0 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/ISearchPredicate.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISearchPredicate.java
@@ -13,10 +13,14 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.api;
+package edu.uci.ics.hyracks.storage.am.common.api;
import java.io.Serializable;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+
public interface ISearchPredicate extends Serializable {
- public boolean isForward();
-}
+ public MultiComparator getLowKeyComparator();
+
+ public MultiComparator getHighKeyComparator();
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/ISlotManager.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISlotManager.java
similarity index 71%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/ISlotManager.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISlotManager.java
index c1d4e0a..29646e7 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/ISlotManager.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISlotManager.java
@@ -13,17 +13,17 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.api;
+package edu.uci.ics.hyracks.storage.am.common.api;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.impls.FindTupleMode;
-import edu.uci.ics.hyracks.storage.am.btree.impls.FindTupleNoExactMatchPolicy;
-import edu.uci.ics.hyracks.storage.am.btree.impls.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
public interface ISlotManager {
- public void setFrame(IBTreeFrame frame);
+ public void setFrame(ITreeIndexFrame frame);
- public int findTupleIndex(ITupleReference searchKey, IBTreeTupleReference frameTuple, MultiComparator multiCmp,
+ public int findTupleIndex(ITupleReference searchKey, ITreeIndexTupleReference frameTuple, MultiComparator multiCmp,
FindTupleMode mode, FindTupleNoExactMatchPolicy matchPolicy);
public int insertSlot(int tupleIndex, int tupleOff);
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISplitKey.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISplitKey.java
new file mode 100644
index 0000000..246c09d
--- /dev/null
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISplitKey.java
@@ -0,0 +1,25 @@
+package edu.uci.ics.hyracks.storage.am.common.api;
+
+import java.nio.ByteBuffer;
+
+public interface ISplitKey {
+ public void initData(int keySize);
+
+ public void reset();
+
+ public ByteBuffer getBuffer();
+
+ public ITreeIndexTupleReference getTuple();
+
+ public int getLeftPage();
+
+ public int getRightPage();
+
+ public void setLeftPage(int leftPage);
+
+ public void setRightPage(int rightPage);
+
+ public void setPages(int leftPage, int rightPage);
+
+ public ISplitKey duplicate(ITreeIndexTupleReference copyTuple);
+}
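
A rough usage sketch follows; the exact division of labor between the frame and its caller is an assumption here, namely that ITreeIndexFrame.split() fills in the separator key and the caller then records the sibling page ids on the same object.

    import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
    import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
    import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
    import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;

    public class SplitKeyUsageSketch {
        // Hypothetical helper: splits leftFrame into rightFrame while inserting tuple,
        // then tags the resulting separator key with the ids of the two sibling pages
        // so it can be propagated to the parent.
        public static void splitAndTag(ITreeIndexFrame leftFrame, ITreeIndexFrame rightFrame, ITupleReference tuple,
                MultiComparator cmp, ISplitKey splitKey, int leftPageId, int rightPageId) throws Exception {
            splitKey.reset();
            leftFrame.split(rightFrame, tuple, cmp, splitKey);
            splitKey.setPages(leftPageId, rightPageId);
        }
    }
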
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndex.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndex.java
new file mode 100644
index 0000000..2d77f06
--- /dev/null
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndex.java
@@ -0,0 +1,52 @@
+package edu.uci.ics.hyracks.storage.am.common.api;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOpContext;
+
+public interface ITreeIndex {
+ // init:
+
+ public void create(int indexFileId, ITreeIndexFrame leafFrame, ITreeIndexMetaDataFrame metaFrame) throws Exception;
+
+ public void open(int indexFileId);
+
+ // operations:
+
+ public void insert(ITupleReference tuple, IndexOpContext ictx) throws Exception;
+
+ public void update(ITupleReference tuple, IndexOpContext ictx) throws Exception;
+
+ public void delete(ITupleReference tuple, IndexOpContext ictx) throws Exception;
+
+ public IndexOpContext createOpContext(IndexOp op, ITreeIndexFrame leafFrame, ITreeIndexFrame interiorFrame,
+ ITreeIndexMetaDataFrame metaFrame);
+
+ // bulk loading:
+
+ public IIndexBulkLoadContext beginBulkLoad(float fillFactor, ITreeIndexFrame leafFrame,
+ ITreeIndexFrame interiorFrame, ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException;
+
+ public void bulkLoadAddTuple(IIndexBulkLoadContext ictx, ITupleReference tuple) throws HyracksDataException;
+
+ public void endBulkLoad(IIndexBulkLoadContext ictx) throws HyracksDataException;
+
+ // search:
+ public void diskOrderScan(ITreeIndexCursor icursor, ITreeIndexFrame leafFrame, ITreeIndexMetaDataFrame metaFrame,
+ IndexOpContext ictx) throws HyracksDataException;
+
+ // utility:
+
+ public IFreePageManager getFreePageManager();
+
+ public int getRootPageId();
+
+ public ITreeIndexFrameFactory getLeafFrameFactory();
+
+ public ITreeIndexFrameFactory getInteriorFrameFactory();
+
+ public int getFieldCount();
+
+ public IndexType getIndexType();
+}
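
A minimal caller-side sketch, assuming a concrete ITreeIndex plus matching frames already exist and that IndexOp has an INSERT member:

    import java.util.List;

    import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
    import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
    import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
    import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
    import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
    import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOpContext;

    public class TreeIndexInsertSketch {
        // The tree index, frames and tuples are assumed to come from a concrete
        // access method (e.g. a BTree) and its frame/tuple-writer factories.
        public static void insertAll(ITreeIndex treeIndex, ITreeIndexFrame leafFrame, ITreeIndexFrame interiorFrame,
                ITreeIndexMetaDataFrame metaFrame, List<ITupleReference> tuples) throws Exception {
            // one op context is created per operation kind and reused across calls
            IndexOpContext insertCtx = treeIndex.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
            for (ITupleReference tuple : tuples) {
                treeIndex.insert(tuple, insertCtx);
            }
        }
    }
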
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeCursor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexCursor.java
similarity index 85%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeCursor.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexCursor.java
index 8934921..22b2b6f 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeCursor.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexCursor.java
@@ -13,20 +13,20 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.api;
+package edu.uci.ics.hyracks.storage.am.common.api;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-public interface IBTreeCursor {
+public interface ITreeIndexCursor {
public void reset();
public boolean hasNext() throws Exception;
public void next() throws Exception;
- public void open(ICachedPage page, ISearchPredicate searchPred) throws Exception;
+ public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws Exception;
public ICachedPage getPage();
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeFrame.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrame.java
similarity index 74%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeFrame.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrame.java
index 0e15903..246efe4 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeFrame.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrame.java
@@ -13,32 +13,34 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.api;
+package edu.uci.ics.hyracks.storage.am.common.api;
import java.nio.ByteBuffer;
import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.impls.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.btree.impls.SpaceStatus;
-import edu.uci.ics.hyracks.storage.am.btree.impls.SplitKey;
+import edu.uci.ics.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-public interface IBTreeFrame {
+public interface ITreeIndexFrame {
public void setPage(ICachedPage page);
public ICachedPage getPage();
public ByteBuffer getBuffer();
- public void insert(ITupleReference tuple, MultiComparator cmp) throws Exception;
+ public int findTupleIndex(ITupleReference tuple, MultiComparator cmp) throws Exception;
+
+ public void insert(ITupleReference tuple, MultiComparator cmp, int tupleIndex) throws Exception;
public void update(int rid, ITupleReference tuple) throws Exception;
public void delete(ITupleReference tuple, MultiComparator cmp, boolean exactDelete) throws Exception;
- public void compact(MultiComparator cmp);
+ // returns true if slots were modified, false otherwise
+ public boolean compact(MultiComparator cmp);
public boolean compress(MultiComparator cmp) throws HyracksDataException;
@@ -47,9 +49,9 @@
public int getTupleCount();
// assumption: page must be write-latched at this point
- public SpaceStatus hasSpaceInsert(ITupleReference tuple, MultiComparator cmp);
+ public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple, MultiComparator cmp);
- public SpaceStatus hasSpaceUpdate(int rid, ITupleReference tuple, MultiComparator cmp);
+ public FrameOpSpaceStatus hasSpaceUpdate(int rid, ITupleReference tuple, MultiComparator cmp);
public int getTupleOffset(int slotNum);
@@ -65,10 +67,9 @@
public String printKeys(MultiComparator cmp, ISerializerDeserializer[] fields) throws HyracksDataException;
// TODO: what if a tuple is larger than half the page size?
- public int split(IBTreeFrame rightFrame, ITupleReference tuple, MultiComparator cmp, SplitKey splitKey)
+ public int split(ITreeIndexFrame rightFrame, ITupleReference tuple, MultiComparator cmp, ISplitKey splitKey)
throws Exception;
- // TODO: check if we do something nicer than returning object
public ISlotManager getSlotManager();
// ATTENTION: in b-tree operations it may not always be possible to
@@ -77,6 +78,8 @@
// values when given the same ByteBuffer for the functions below
public boolean isLeaf();
+ public boolean isInterior();
+
public byte getLevel();
public void setLevel(byte level);
@@ -85,8 +88,6 @@
public void setSmFlag(boolean smFlag);
- public void insertSorted(ITupleReference tuple, MultiComparator cmp) throws HyracksDataException;
-
public int getSlotSize();
// TODO: should be removed after new tuple format
@@ -97,6 +98,7 @@
public void setFreeSpaceOff(int freeSpace);
- public IBTreeTupleWriter getTupleWriter();
+ public ITreeIndexTupleWriter getTupleWriter();
+ public int getPageHeaderSize();
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrameFactory.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrameFactory.java
new file mode 100644
index 0000000..83b95b6
--- /dev/null
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrameFactory.java
@@ -0,0 +1,7 @@
+package edu.uci.ics.hyracks.storage.am.common.api;
+
+import java.io.Serializable;
+
+public interface ITreeIndexFrameFactory extends Serializable {
+ public ITreeIndexFrame createFrame();
+}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeMetaDataFrame.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrame.java
similarity index 91%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeMetaDataFrame.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrame.java
index 58739e2..bbd03d6 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeMetaDataFrame.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrame.java
@@ -13,11 +13,11 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.api;
+package edu.uci.ics.hyracks.storage.am.common.api;
import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-public interface IBTreeMetaDataFrame {
+public interface ITreeIndexMetaDataFrame {
public void initBuffer(int level);
public void setPage(ICachedPage page);
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeMetaDataFrameFactory.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrameFactory.java
similarity index 80%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeMetaDataFrameFactory.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrameFactory.java
index 2912cf9..6fd88e8 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeMetaDataFrameFactory.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrameFactory.java
@@ -13,8 +13,8 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.api;
+package edu.uci.ics.hyracks.storage.am.common.api;
-public interface IBTreeMetaDataFrameFactory {
- public IBTreeMetaDataFrame getFrame();
+public interface ITreeIndexMetaDataFrameFactory {
+ public ITreeIndexMetaDataFrame createFrame();
}
\ No newline at end of file
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeTupleReference.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleReference.java
similarity index 75%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeTupleReference.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleReference.java
index 38d6217..d2c2df4 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeTupleReference.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleReference.java
@@ -13,18 +13,18 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.api;
+package edu.uci.ics.hyracks.storage.am.common.api;
import java.nio.ByteBuffer;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-public interface IBTreeTupleReference extends ITupleReference {
+public interface ITreeIndexTupleReference extends ITupleReference {
public void setFieldCount(int fieldCount);
public void setFieldCount(int fieldStartIndex, int fieldCount);
- public void resetByOffset(ByteBuffer buf, int tupleStartOffset);
+ public void resetByTupleOffset(ByteBuffer buf, int tupleStartOffset);
- public void resetByTupleIndex(IBTreeFrame frame, int tupleIndex);
+ public void resetByTupleIndex(ITreeIndexFrame frame, int tupleIndex);
}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeTupleWriter.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriter.java
similarity index 89%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeTupleWriter.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriter.java
index d8cb9af..39577ea 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeTupleWriter.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriter.java
@@ -13,13 +13,13 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.api;
+package edu.uci.ics.hyracks.storage.am.common.api;
import java.nio.ByteBuffer;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-public interface IBTreeTupleWriter {
+public interface ITreeIndexTupleWriter {
public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf, int targetOff);
public int bytesRequired(ITupleReference tuple);
@@ -33,5 +33,5 @@
// writer
// the main idea is that the format of the written tuple may not be the same
// as the format written by this writer
- public IBTreeTupleReference createTupleReference();
+ public ITreeIndexTupleReference createTupleReference();
}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeLeafFrameFactory.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriterFactory.java
similarity index 78%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeLeafFrameFactory.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriterFactory.java
index 50733de..bd7bfda 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeLeafFrameFactory.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriterFactory.java
@@ -13,10 +13,10 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.api;
+package edu.uci.ics.hyracks.storage.am.common.api;
import java.io.Serializable;
-public interface IBTreeLeafFrameFactory extends Serializable {
- public IBTreeLeafFrame getFrame();
-}
\ No newline at end of file
+public interface ITreeIndexTupleWriterFactory extends Serializable {
+ public ITreeIndexTupleWriter createTupleWriter();
+}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IndexType.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IndexType.java
new file mode 100644
index 0000000..d5f9f44
--- /dev/null
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IndexType.java
@@ -0,0 +1,5 @@
+package edu.uci.ics.hyracks.storage.am.common.api;
+
+public enum IndexType {
+ BTREE, RTREE, INVERTED
+}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/SimpleTupleWriterFactory.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/TreeIndexException.java
similarity index 61%
copy from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/SimpleTupleWriterFactory.java
copy to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/TreeIndexException.java
index 3b06153..ad3db58 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/SimpleTupleWriterFactory.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/TreeIndexException.java
@@ -13,18 +13,26 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.tuples;
+package edu.uci.ics.hyracks.storage.am.common.api;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleWriter;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleWriterFactory;
-
-public class SimpleTupleWriterFactory implements IBTreeTupleWriterFactory {
+public class TreeIndexException extends Exception {
private static final long serialVersionUID = 1L;
+ private boolean handled = false;
- @Override
- public IBTreeTupleWriter createTupleWriter() {
- return new SimpleTupleWriter();
+ public TreeIndexException(Exception e) {
+ super(e);
}
+ public TreeIndexException(String message) {
+ super(message);
+ }
+
+ public void setHandled(boolean handled) {
+ this.handled = handled;
+ }
+
+ public boolean getHandled() {
+ return handled;
+ }
}
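
A small self-contained sketch of one plausible use of the handled flag (the reporting policy itself is an assumption, not prescribed by the patch):

    import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;

    public class TreeIndexExceptionSketch {
        // report an error only the first time it is seen, then mark it as handled
        // so callers further up the stack can skip duplicate reporting
        public static void reportOnce(TreeIndexException e) {
            if (!e.getHandled()) {
                System.err.println("tree index operation failed: " + e.getMessage());
                e.setHandled(true);
            }
        }

        public static void main(String[] args) {
            TreeIndexException e = new TreeIndexException("duplicate key");
            reportOnce(e); // prints the message
            reportOnce(e); // suppressed: already marked as handled
        }
    }
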
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/AbstractTreeIndexOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/AbstractTreeIndexOperatorDescriptor.java
new file mode 100644
index 0000000..c905eb2
--- /dev/null
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/AbstractTreeIndexOperatorDescriptor.java
@@ -0,0 +1,110 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
+public abstract class AbstractTreeIndexOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor implements
+ ITreeIndexOperatorDescriptorHelper {
+
+ private static final long serialVersionUID = 1L;
+
+ protected final IFileSplitProvider fileSplitProvider;
+
+ protected final IBinaryComparatorFactory[] comparatorFactories;
+
+ protected final ITreeIndexFrameFactory interiorFrameFactory;
+ protected final ITreeIndexFrameFactory leafFrameFactory;
+
+ protected final IStorageManagerInterface storageManager;
+ protected final IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider;
+
+ protected final ITypeTrait[] typeTraits;
+
+ protected final ITreeIndexOpHelperFactory opHelperFactory;
+
+ public AbstractTreeIndexOperatorDescriptor(JobSpecification spec, int inputArity, int outputArity,
+ RecordDescriptor recDesc, IStorageManagerInterface storageManager,
+ IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider, IFileSplitProvider fileSplitProvider,
+ ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory leafFrameFactory,
+ ITypeTrait[] typeTraits, IBinaryComparatorFactory[] comparatorFactories,
+ ITreeIndexOpHelperFactory opHelperFactory) {
+ super(spec, inputArity, outputArity);
+ this.fileSplitProvider = fileSplitProvider;
+ this.storageManager = storageManager;
+ this.treeIndexRegistryProvider = treeIndexRegistryProvider;
+ this.interiorFrameFactory = interiorFrameFactory;
+ this.leafFrameFactory = leafFrameFactory;
+ this.typeTraits = typeTraits;
+ this.comparatorFactories = comparatorFactories;
+ this.opHelperFactory = opHelperFactory;
+ if (outputArity > 0)
+ recordDescriptors[0] = recDesc;
+ }
+
+ @Override
+ public IFileSplitProvider getTreeIndexFileSplitProvider() {
+ return fileSplitProvider;
+ }
+
+ @Override
+ public IBinaryComparatorFactory[] getTreeIndexComparatorFactories() {
+ return comparatorFactories;
+ }
+
+ @Override
+ public ITypeTrait[] getTreeIndexTypeTraits() {
+ return typeTraits;
+ }
+
+ @Override
+ public ITreeIndexFrameFactory getTreeIndexInteriorFactory() {
+ return interiorFrameFactory;
+ }
+
+ @Override
+ public ITreeIndexFrameFactory getTreeIndexLeafFactory() {
+ return leafFrameFactory;
+ }
+
+ @Override
+ public IStorageManagerInterface getStorageManager() {
+ return storageManager;
+ }
+
+ @Override
+ public IIndexRegistryProvider<ITreeIndex> getTreeIndexRegistryProvider() {
+ return treeIndexRegistryProvider;
+ }
+
+ @Override
+ public RecordDescriptor getRecordDescriptor() {
+ return recordDescriptors[0];
+ }
+
+ @Override
+ public ITreeIndexOpHelperFactory getTreeIndexOpHelperFactory() {
+ return opHelperFactory;
+ }
+}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/IBTreeRegistryProvider.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IIndexRegistryProvider.java
similarity index 77%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/IBTreeRegistryProvider.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IIndexRegistryProvider.java
index 7b66d0a..aadcaf9 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/IBTreeRegistryProvider.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IIndexRegistryProvider.java
@@ -13,12 +13,12 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.dataflow;
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
import java.io.Serializable;
import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
-public interface IBTreeRegistryProvider extends Serializable {
- public BTreeRegistry getBTreeRegistry(IHyracksStageletContext ctx);
+public interface IIndexRegistryProvider<IndexType> extends Serializable {
+ public IndexRegistry<IndexType> getRegistry(IHyracksStageletContext ctx);
}
\ No newline at end of file
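
For illustration, a provider can be as small as the sketch below, which hands every stagelet the same JVM-wide registry; the class name and the static-singleton choice are assumptions made for the example.

    import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
    import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
    import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
    import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexRegistry;

    public class StaticTreeIndexRegistryProvider implements IIndexRegistryProvider<ITreeIndex> {
        private static final long serialVersionUID = 1L;

        // one registry per JVM; static fields are not serialized with the provider
        private static final IndexRegistry<ITreeIndex> REGISTRY = new IndexRegistry<ITreeIndex>();

        @Override
        public IndexRegistry<ITreeIndex> getRegistry(IHyracksStageletContext ctx) {
            return REGISTRY;
        }
    }
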
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/ITreeIndexOpHelperFactory.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/ITreeIndexOpHelperFactory.java
new file mode 100644
index 0000000..fa37fab
--- /dev/null
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/ITreeIndexOpHelperFactory.java
@@ -0,0 +1,10 @@
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import java.io.Serializable;
+
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+
+public interface ITreeIndexOpHelperFactory extends Serializable {
+ public TreeIndexOpHelper createTreeIndexOpHelper(ITreeIndexOperatorDescriptorHelper opDesc,
+ final IHyracksStageletContext ctx, int partition, IndexHelperOpenMode mode);
+}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/ITreeIndexOperatorDescriptorHelper.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/ITreeIndexOperatorDescriptorHelper.java
new file mode 100644
index 0000000..6ca4529
--- /dev/null
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/ITreeIndexOperatorDescriptorHelper.java
@@ -0,0 +1,30 @@
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import edu.uci.ics.hyracks.api.dataflow.IActivityNode;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
+public interface ITreeIndexOperatorDescriptorHelper extends IActivityNode {
+ public IFileSplitProvider getTreeIndexFileSplitProvider();
+
+ public IBinaryComparatorFactory[] getTreeIndexComparatorFactories();
+
+ public ITypeTrait[] getTreeIndexTypeTraits();
+
+ public ITreeIndexFrameFactory getTreeIndexInteriorFactory();
+
+ public ITreeIndexFrameFactory getTreeIndexLeafFactory();
+
+ public IStorageManagerInterface getStorageManager();
+
+ public IIndexRegistryProvider<ITreeIndex> getTreeIndexRegistryProvider();
+
+ public RecordDescriptor getRecordDescriptor();
+
+ public ITreeIndexOpHelperFactory getTreeIndexOpHelperFactory();
+}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexHelperOpenMode.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexHelperOpenMode.java
new file mode 100644
index 0000000..aa41184
--- /dev/null
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexHelperOpenMode.java
@@ -0,0 +1,5 @@
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+public enum IndexHelperOpenMode {
+ OPEN, CREATE, ENLIST
+}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeRegistry.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexRegistry.java
similarity index 74%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeRegistry.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexRegistry.java
index 9b90e22..de00d5a 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeRegistry.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexRegistry.java
@@ -13,24 +13,21 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.dataflow;
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
import java.util.HashMap;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+public class IndexRegistry<IndexType> {
-public class BTreeRegistry {
-
- private HashMap<Integer, BTree> map = new HashMap<Integer, BTree>();
+ private HashMap<Integer, IndexType> map = new HashMap<Integer, IndexType>();
private Lock registryLock = new ReentrantLock();
- public BTree get(int fileId) {
+ public IndexType get(int fileId) {
return map.get(fileId);
}
- // TODO: not very high concurrency, but good enough for now
public void lock() {
registryLock.lock();
}
@@ -39,8 +36,8 @@
registryLock.unlock();
}
- public void register(int fileId, BTree btree) {
- map.put(fileId, btree);
+ public void register(int fileId, IndexType index) {
+ map.put(fileId, index);
}
public void unregister(int fileId) {
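
Typical caller-side usage looks like the sketch below, assuming the registry's lock()/unlock() pair guards the get/register sequence (unlock() is not visible in this hunk and is assumed to mirror lock()):

    import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
    import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexRegistry;

    public class IndexRegistryUsageSketch {
        // look up an index by file id, registering the supplied candidate if none is present yet
        public static ITreeIndex lookupOrRegister(IndexRegistry<ITreeIndex> registry, int fileId, ITreeIndex candidate) {
            registry.lock();
            try {
                ITreeIndex existing = registry.get(fileId);
                if (existing != null) {
                    return existing;
                }
                registry.register(fileId, candidate);
                return candidate;
            } finally {
                registry.unlock();
            }
        }
    }
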
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/PermutingFrameTupleReference.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/PermutingFrameTupleReference.java
similarity index 96%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/PermutingFrameTupleReference.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/PermutingFrameTupleReference.java
index a3f343a..3db9db2 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/PermutingFrameTupleReference.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/PermutingFrameTupleReference.java
@@ -13,7 +13,7 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.dataflow;
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeBulkLoadOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorDescriptor.java
similarity index 60%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeBulkLoadOperatorDescriptor.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorDescriptor.java
index a4c4b61..7d585b3 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeBulkLoadOperatorDescriptor.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorDescriptor.java
@@ -13,7 +13,7 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.dataflow;
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
@@ -23,23 +23,24 @@
import edu.uci.ics.hyracks.api.job.IOperatorEnvironment;
import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-public class BTreeBulkLoadOperatorDescriptor extends AbstractBTreeOperatorDescriptor {
+public class TreeIndexBulkLoadOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
private static final long serialVersionUID = 1L;
private final int[] fieldPermutation;
private final float fillFactor;
- public BTreeBulkLoadOperatorDescriptor(JobSpecification spec, IStorageManagerInterface storageManager,
- IBTreeRegistryProvider btreeRegistryProvider, IFileSplitProvider fileSplitProvider,
- IBTreeInteriorFrameFactory interiorFactory, IBTreeLeafFrameFactory leafFactory, ITypeTrait[] typeTraits,
- IBinaryComparatorFactory[] comparatorFactories, int[] fieldPermutation, float fillFactor) {
- super(spec, 1, 0, null, storageManager, btreeRegistryProvider, fileSplitProvider, interiorFactory, leafFactory,
- typeTraits, comparatorFactories);
+ public TreeIndexBulkLoadOperatorDescriptor(JobSpecification spec, IStorageManagerInterface storageManager,
+ IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider, IFileSplitProvider fileSplitProvider,
+ ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory leafFrameFactory,
+ ITypeTrait[] typeTraits, IBinaryComparatorFactory[] comparatorFactories, int[] fieldPermutation,
+ float fillFactor, ITreeIndexOpHelperFactory opHelperFactory) {
+ super(spec, 1, 0, null, storageManager, treeIndexRegistryProvider, fileSplitProvider, interiorFrameFactory,
+ leafFrameFactory, typeTraits, comparatorFactories, opHelperFactory);
this.fieldPermutation = fieldPermutation;
this.fillFactor = fillFactor;
}
@@ -47,7 +48,7 @@
@Override
public IOperatorNodePushable createPushRuntime(IHyracksStageletContext ctx, IOperatorEnvironment env,
IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
- return new BTreeBulkLoadOperatorNodePushable(this, ctx, partition, fieldPermutation, fillFactor,
+ return new TreeIndexBulkLoadOperatorNodePushable(this, ctx, partition, fieldPermutation, fillFactor,
recordDescProvider);
}
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorNodePushable.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorNodePushable.java
new file mode 100644
index 0000000..2001039
--- /dev/null
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorNodePushable.java
@@ -0,0 +1,90 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrame;
+
+public class TreeIndexBulkLoadOperatorNodePushable extends AbstractUnaryInputSinkOperatorNodePushable {
+ private float fillFactor;
+ private final TreeIndexOpHelper treeIndexOpHelper;
+ private FrameTupleAccessor accessor;
+ private IIndexBulkLoadContext bulkLoadCtx;
+
+ private IRecordDescriptorProvider recordDescProvider;
+
+ private PermutingFrameTupleReference tuple = new PermutingFrameTupleReference();
+
+ public TreeIndexBulkLoadOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
+ IHyracksStageletContext ctx, int partition, int[] fieldPermutation, float fillFactor,
+ IRecordDescriptorProvider recordDescProvider) {
+ treeIndexOpHelper = opDesc.getTreeIndexOpHelperFactory().createTreeIndexOpHelper(opDesc, ctx, partition,
+ IndexHelperOpenMode.CREATE);
+ this.fillFactor = fillFactor;
+ this.recordDescProvider = recordDescProvider;
+ tuple.setFieldPermutation(fieldPermutation);
+ }
+
+ @Override
+ public void open() throws HyracksDataException {
+ AbstractTreeIndexOperatorDescriptor opDesc = (AbstractTreeIndexOperatorDescriptor) treeIndexOpHelper
+ .getOperatorDescriptor();
+ RecordDescriptor recDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getOperatorId(), 0);
+ accessor = new FrameTupleAccessor(treeIndexOpHelper.getHyracksStageletContext().getFrameSize(), recDesc);
+ ITreeIndexMetaDataFrame metaFrame = new LIFOMetaDataFrame();
+ try {
+ treeIndexOpHelper.init();
+ treeIndexOpHelper.getTreeIndex().open(treeIndexOpHelper.getIndexFileId());
+ bulkLoadCtx = treeIndexOpHelper.getTreeIndex().beginBulkLoad(fillFactor, treeIndexOpHelper.getLeafFrame(),
+ treeIndexOpHelper.getInteriorFrame(), metaFrame);
+ } catch (Exception e) {
+ // cleanup in case of failure
+ treeIndexOpHelper.deinit();
+ throw new HyracksDataException(e);
+ }
+ }
+
+ @Override
+ public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+ accessor.reset(buffer);
+ int tupleCount = accessor.getTupleCount();
+ for (int i = 0; i < tupleCount; i++) {
+ tuple.reset(accessor, i);
+ treeIndexOpHelper.getTreeIndex().bulkLoadAddTuple(bulkLoadCtx, tuple);
+ }
+ }
+
+ @Override
+ public void close() throws HyracksDataException {
+ try {
+ treeIndexOpHelper.getTreeIndex().endBulkLoad(bulkLoadCtx);
+ } finally {
+ treeIndexOpHelper.deinit();
+ }
+ }
+
+ @Override
+ public void flush() throws HyracksDataException {
+ }
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorDescriptor.java
new file mode 100644
index 0000000..73b5323
--- /dev/null
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorDescriptor.java
@@ -0,0 +1,47 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.IOperatorEnvironment;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
+public class TreeIndexDiskOrderScanOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
+
+ private static final long serialVersionUID = 1L;
+
+ public TreeIndexDiskOrderScanOperatorDescriptor(JobSpecification spec, RecordDescriptor recDesc,
+ IStorageManagerInterface storageManager, IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider,
+ IFileSplitProvider fileSplitProvider, ITreeIndexFrameFactory interiorFrameFactory,
+ ITreeIndexFrameFactory leafFrameFactory, ITypeTrait[] typeTraits, ITreeIndexOpHelperFactory opHelperFactory) {
+ super(spec, 0, 1, recDesc, storageManager, treeIndexRegistryProvider, fileSplitProvider, interiorFrameFactory,
+ leafFrameFactory, typeTraits, null, opHelperFactory);
+ }
+
+ @Override
+ public IOperatorNodePushable createPushRuntime(IHyracksStageletContext ctx, IOperatorEnvironment env,
+ IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
+ return new TreeIndexDiskOrderScanOperatorNodePushable(this, ctx, partition);
+ }
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorNodePushable.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorNodePushable.java
new file mode 100644
index 0000000..415b9e3
--- /dev/null
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorNodePushable.java
@@ -0,0 +1,104 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import java.io.DataOutput;
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOpContext;
+
+public class TreeIndexDiskOrderScanOperatorNodePushable extends AbstractUnaryOutputSourceOperatorNodePushable {
+ private final TreeIndexOpHelper treeIndexOpHelper;
+
+ public TreeIndexDiskOrderScanOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
+ IHyracksStageletContext ctx, int partition) {
+ treeIndexOpHelper = opDesc.getTreeIndexOpHelperFactory().createTreeIndexOpHelper(opDesc, ctx, partition,
+ IndexHelperOpenMode.OPEN);
+ }
+
+ @Override
+ public void initialize() throws HyracksDataException {
+
+ ITreeIndexFrame cursorFrame = treeIndexOpHelper.getOperatorDescriptor().getTreeIndexLeafFactory().createFrame();
+ ITreeIndexCursor cursor = treeIndexOpHelper.createDiskOrderScanCursor(cursorFrame);
+ ITreeIndexMetaDataFrame metaFrame = new LIFOMetaDataFrame();
+
+ IndexOpContext diskOrderScanOpCtx = treeIndexOpHelper.getTreeIndex().createOpContext(IndexOp.DISKORDERSCAN,
+ cursorFrame, null, null);
+ try {
+
+ treeIndexOpHelper.init();
+
+ try {
+ treeIndexOpHelper.getTreeIndex().diskOrderScan(cursor, cursorFrame, metaFrame, diskOrderScanOpCtx);
+
+ int fieldCount = treeIndexOpHelper.getTreeIndex().getFieldCount();
+ ByteBuffer frame = treeIndexOpHelper.getHyracksStageletContext().allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(treeIndexOpHelper.getHyracksStageletContext()
+ .getFrameSize());
+ appender.reset(frame, true);
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+ DataOutput dos = tb.getDataOutput();
+
+ while (cursor.hasNext()) {
+ tb.reset();
+ cursor.next();
+
+ ITupleReference frameTuple = cursor.getTuple();
+ for (int i = 0; i < frameTuple.getFieldCount(); i++) {
+ dos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
+ tb.addFieldEndOffset();
+ }
+
+ if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+ FrameUtils.flushFrame(frame, writer);
+ appender.reset(frame, true);
+ if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+ throw new IllegalStateException();
+ }
+ }
+ }
+
+ if (appender.getTupleCount() > 0) {
+ FrameUtils.flushFrame(frame, writer);
+ }
+ } finally {
+ cursor.close();
+ writer.close();
+ }
+
+ } catch (Exception e) {
+ deinitialize();
+ throw new HyracksDataException(e);
+ }
+ }
+
+ @Override
+ public void deinitialize() throws HyracksDataException {
+ treeIndexOpHelper.deinit();
+ }
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDropOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDropOperatorDescriptor.java
similarity index 69%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDropOperatorDescriptor.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDropOperatorDescriptor.java
index 970d44a..2cfa905 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeDropOperatorDescriptor.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDropOperatorDescriptor.java
@@ -13,7 +13,7 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.dataflow;
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
@@ -22,27 +22,29 @@
import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-public class BTreeDropOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+public class TreeIndexDropOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
private static final long serialVersionUID = 1L;
private IStorageManagerInterface storageManager;
- private IBTreeRegistryProvider btreeRegistryProvider;
+ private IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider;
private IFileSplitProvider fileSplitProvider;
- public BTreeDropOperatorDescriptor(JobSpecification spec, IStorageManagerInterface storageManager,
- IBTreeRegistryProvider btreeRegistryProvider, IFileSplitProvider fileSplitProvider) {
+ public TreeIndexDropOperatorDescriptor(JobSpecification spec, IStorageManagerInterface storageManager,
+ IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider, IFileSplitProvider fileSplitProvider) {
super(spec, 0, 0);
this.storageManager = storageManager;
- this.btreeRegistryProvider = btreeRegistryProvider;
+ this.treeIndexRegistryProvider = treeIndexRegistryProvider;
this.fileSplitProvider = fileSplitProvider;
}
@Override
public IOperatorNodePushable createPushRuntime(IHyracksStageletContext ctx, IOperatorEnvironment env,
IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
- return new BTreeDropOperatorNodePushable(ctx, storageManager, btreeRegistryProvider, fileSplitProvider, partition);
+ return new TreeIndexDropOperatorNodePushable(ctx, storageManager, treeIndexRegistryProvider, fileSplitProvider,
+ partition);
}
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDropOperatorNodePushable.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDropOperatorNodePushable.java
new file mode 100644
index 0000000..71346f7
--- /dev/null
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDropOperatorNodePushable.java
@@ -0,0 +1,108 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractOperatorNodePushable;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+
+public class TreeIndexDropOperatorNodePushable extends AbstractOperatorNodePushable {
+ private static final Logger LOGGER = Logger.getLogger(TreeIndexDropOperatorNodePushable.class.getName());
+
+ private final IHyracksStageletContext ctx;
+ private IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider;
+ private IStorageManagerInterface storageManager;
+ private IFileSplitProvider fileSplitProvider;
+ private int partition;
+
+ public TreeIndexDropOperatorNodePushable(IHyracksStageletContext ctx, IStorageManagerInterface storageManager,
+ IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider, IFileSplitProvider fileSplitProvider,
+ int partition) {
+ this.ctx = ctx;
+ this.storageManager = storageManager;
+ this.treeIndexRegistryProvider = treeIndexRegistryProvider;
+ this.fileSplitProvider = fileSplitProvider;
+ this.partition = partition;
+ }
+
+ @Override
+ public void deinitialize() throws HyracksDataException {
+ }
+
+ @Override
+ public int getInputArity() {
+ return 0;
+ }
+
+ @Override
+ public IFrameWriter getInputFrameWriter(int index) {
+ return null;
+ }
+
+ @Override
+ public void initialize() throws HyracksDataException {
+ try {
+
+ IndexRegistry<ITreeIndex> treeIndexRegistry = treeIndexRegistryProvider.getRegistry(ctx);
+ IBufferCache bufferCache = storageManager.getBufferCache(ctx);
+ IFileMapProvider fileMapProvider = storageManager.getFileMapProvider(ctx);
+
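+ // resolve this partition's local file and look up its buffer-cache file id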
+ FileReference f = fileSplitProvider.getFileSplits()[partition].getLocalFile();
+
+ boolean fileIsMapped = fileMapProvider.isMapped(f);
+ if (!fileIsMapped) {
+ throw new HyracksDataException("Cannot drop tree index with name " + f.toString()
+ + ". No file mapping exists.");
+ }
+
+ int indexFileId = fileMapProvider.lookupFileId(f);
+
+ // unregister tree instance
+ treeIndexRegistry.lock();
+ try {
+ treeIndexRegistry.unregister(indexFileId);
+ } finally {
+ treeIndexRegistry.unlock();
+ }
+
+ // delete the file and remove its name-to-id mapping
+ bufferCache.deleteFile(indexFileId);
+ }
+ // TODO: for the time being we don't rethrow; once proper exception
+ // handling is in place (avoiding the hanging-job problem), this
+ // should throw
+ catch (Exception e) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
+ LOGGER.warning("Tree index drop operator failed: " + e.getMessage());
+ }
+ }
+ }
+
+ @Override
+ public void setOutputFrameWriter(int index, IFrameWriter writer, RecordDescriptor recordDesc) {
+ }
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexFileEnlistmentOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexFileEnlistmentOperatorDescriptor.java
new file mode 100644
index 0000000..0961a4f
--- /dev/null
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexFileEnlistmentOperatorDescriptor.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.IOperatorEnvironment;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
+// Re-creates the in-memory state for a tree index that has already been built (i.e., the index file exists):
+// 1. register the file in the file manager (FileManager)
+// 2. create the file mapping (FileMappingProvider)
+// 3. register the tree index instance (IndexRegistry)
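+//
+// Illustrative usage sketch (not part of this change): since the operator has zero inputs and
+// zero outputs, it is simply added to a JobSpecification so that each partition enlists its
+// local index file. The variables passed to the constructor below are assumed to be supplied
+// by the surrounding job-building code.
+//
+//   TreeIndexFileEnlistmentOperatorDescriptor enlistOp = new TreeIndexFileEnlistmentOperatorDescriptor(
+//           spec, recDesc, storageManager, treeIndexRegistryProvider, fileSplitProvider,
+//           interiorFrameFactory, leafFrameFactory, typeTraits, comparatorFactories, opHelperFactory);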
+
+public class TreeIndexFileEnlistmentOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
+
+ private static final long serialVersionUID = 1L;
+
+ public TreeIndexFileEnlistmentOperatorDescriptor(JobSpecification spec, RecordDescriptor recDesc,
+ IStorageManagerInterface storageManager, IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider,
+ IFileSplitProvider fileSplitProvider, ITreeIndexFrameFactory interiorFrameFactory,
+ ITreeIndexFrameFactory leafFrameFactory, ITypeTrait[] typeTraits,
+ IBinaryComparatorFactory[] comparatorFactories, ITreeIndexOpHelperFactory opHelperFactory) {
+ super(spec, 0, 0, recDesc, storageManager, treeIndexRegistryProvider, fileSplitProvider, interiorFrameFactory,
+ leafFrameFactory, typeTraits, comparatorFactories, opHelperFactory);
+ }
+
+ @Override
+ public IOperatorNodePushable createPushRuntime(IHyracksStageletContext ctx, IOperatorEnvironment env,
+ IRecordDescriptorProvider recordDescProvider, int partition, int partitions) throws HyracksDataException {
+ return new TreeIndexFileEnlistmentOperatorNodePushable(this, ctx, partition);
+ }
+
+}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeFileEnlistmentOperatorNodePushable.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexFileEnlistmentOperatorNodePushable.java
similarity index 69%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeFileEnlistmentOperatorNodePushable.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexFileEnlistmentOperatorNodePushable.java
index 023cd40..8ff6586 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeFileEnlistmentOperatorNodePushable.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexFileEnlistmentOperatorNodePushable.java
@@ -13,7 +13,7 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.dataflow;
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
import edu.uci.ics.hyracks.api.comm.IFrameWriter;
import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
@@ -21,13 +21,14 @@
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractOperatorNodePushable;
-public class BTreeFileEnlistmentOperatorNodePushable extends AbstractOperatorNodePushable {
+public class TreeIndexFileEnlistmentOperatorNodePushable extends AbstractOperatorNodePushable {
- private final BTreeOpHelper btreeOpHelper;
+ private final TreeIndexOpHelper treeIndexOpHelper;
- public BTreeFileEnlistmentOperatorNodePushable(AbstractBTreeOperatorDescriptor opDesc, IHyracksStageletContext ctx,
- int partition) {
- btreeOpHelper = new BTreeOpHelper(opDesc, ctx, partition, BTreeOpHelper.BTreeMode.ENLIST_BTREE);
+ public TreeIndexFileEnlistmentOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
+ IHyracksStageletContext ctx, int partition) {
+ treeIndexOpHelper = opDesc.getTreeIndexOpHelperFactory().createTreeIndexOpHelper(opDesc, ctx, partition,
+ IndexHelperOpenMode.ENLIST);
}
@Override
@@ -47,10 +48,9 @@
@Override
public void initialize() throws HyracksDataException {
try {
- btreeOpHelper.init();
- }
- finally {
- btreeOpHelper.deinit();
+ treeIndexOpHelper.init();
+ } finally {
+ treeIndexOpHelper.deinit();
}
}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeInsertUpdateDeleteOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorDescriptor.java
similarity index 60%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeInsertUpdateDeleteOperatorDescriptor.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorDescriptor.java
index 7d50fff..c8141b9 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeInsertUpdateDeleteOperatorDescriptor.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorDescriptor.java
@@ -13,7 +13,7 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.dataflow;
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
@@ -24,26 +24,27 @@
import edu.uci.ics.hyracks.api.job.IOperatorEnvironment;
import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOp;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
-public class BTreeInsertUpdateDeleteOperatorDescriptor extends AbstractBTreeOperatorDescriptor {
+public class TreeIndexInsertUpdateDeleteOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
private static final long serialVersionUID = 1L;
private final int[] fieldPermutation;
- private BTreeOp op;
+ private IndexOp op;
- public BTreeInsertUpdateDeleteOperatorDescriptor(JobSpecification spec, RecordDescriptor recDesc,
- IStorageManagerInterface storageManager, IBTreeRegistryProvider btreeRegistryProvider,
- IFileSplitProvider fileSplitProvider, IBTreeInteriorFrameFactory interiorFactory,
- IBTreeLeafFrameFactory leafFactory, ITypeTrait[] typeTraits,
- IBinaryComparatorFactory[] comparatorFactories, int[] fieldPermutation, BTreeOp op) {
- super(spec, 1, 1, recDesc, storageManager, btreeRegistryProvider, fileSplitProvider, interiorFactory,
- leafFactory, typeTraits, comparatorFactories);
+ public TreeIndexInsertUpdateDeleteOperatorDescriptor(JobSpecification spec, RecordDescriptor recDesc,
+ IStorageManagerInterface storageManager, IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider,
+ IFileSplitProvider fileSplitProvider, ITreeIndexFrameFactory interiorFrameFactory,
+ ITreeIndexFrameFactory leafFrameFactory, ITypeTrait[] typeTraits,
+ IBinaryComparatorFactory[] comparatorFactories, int[] fieldPermutation, IndexOp op,
+ ITreeIndexOpHelperFactory opHelperFactory) {
+ super(spec, 1, 1, recDesc, storageManager, treeIndexRegistryProvider, fileSplitProvider, interiorFrameFactory,
+ leafFrameFactory, typeTraits, comparatorFactories, opHelperFactory);
this.fieldPermutation = fieldPermutation;
this.op = op;
}
@@ -51,7 +52,7 @@
@Override
public IOperatorNodePushable createPushRuntime(IHyracksStageletContext ctx, IOperatorEnvironment env,
IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
- return new BTreeInsertUpdateDeleteOperatorNodePushable(this, ctx, partition, fieldPermutation,
+ return new TreeIndexInsertUpdateDeleteOperatorNodePushable(this, ctx, partition, fieldPermutation,
recordDescProvider, op);
}
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorNodePushable.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorNodePushable.java
new file mode 100644
index 0000000..74a9efc
--- /dev/null
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorNodePushable.java
@@ -0,0 +1,119 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOpContext;
+
+public class TreeIndexInsertUpdateDeleteOperatorNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
+ private final TreeIndexOpHelper treeIndexOpHelper;
+ private FrameTupleAccessor accessor;
+ private final IRecordDescriptorProvider recordDescProvider;
+ private final IndexOp op;
+ private final PermutingFrameTupleReference tuple = new PermutingFrameTupleReference();
+ private ByteBuffer writeBuffer;
+ private IndexOpContext opCtx;
+
+ public TreeIndexInsertUpdateDeleteOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
+ IHyracksStageletContext ctx, int partition, int[] fieldPermutation,
+ IRecordDescriptorProvider recordDescProvider, IndexOp op) {
+ treeIndexOpHelper = opDesc.getTreeIndexOpHelperFactory().createTreeIndexOpHelper(opDesc, ctx, partition,
+ IndexHelperOpenMode.OPEN);
+ this.recordDescProvider = recordDescProvider;
+ this.op = op;
+ tuple.setFieldPermutation(fieldPermutation);
+ }
+
+ @Override
+ public void open() throws HyracksDataException {
+ AbstractTreeIndexOperatorDescriptor opDesc = (AbstractTreeIndexOperatorDescriptor) treeIndexOpHelper
+ .getOperatorDescriptor();
+ RecordDescriptor inputRecDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getOperatorId(), 0);
+ accessor = new FrameTupleAccessor(treeIndexOpHelper.getHyracksStageletContext().getFrameSize(), inputRecDesc);
+ writeBuffer = treeIndexOpHelper.getHyracksStageletContext().allocateFrame();
+ try {
+ treeIndexOpHelper.init();
+ treeIndexOpHelper.getTreeIndex().open(treeIndexOpHelper.getIndexFileId());
+ opCtx = treeIndexOpHelper.getTreeIndex().createOpContext(op, treeIndexOpHelper.getLeafFrame(),
+ treeIndexOpHelper.getInteriorFrame(), new LIFOMetaDataFrame());
+ } catch (Exception e) {
+ // cleanup in case of failure
+ treeIndexOpHelper.deinit();
+ throw new HyracksDataException(e);
+ }
+ }
+
+ @Override
+ public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+ final ITreeIndex treeIndex = treeIndexOpHelper.getTreeIndex();
+ accessor.reset(buffer);
+
+ int tupleCount = accessor.getTupleCount();
+ for (int i = 0; i < tupleCount; i++) {
+ tuple.reset(accessor, i);
+ try {
+ switch (op) {
+
+ case INSERT: {
+ treeIndex.insert(tuple, opCtx);
+ }
+ break;
+
+ case DELETE: {
+ treeIndex.delete(tuple, opCtx);
+ }
+ break;
+
+ default: {
+ throw new HyracksDataException("Unsupported operation " + op
+ + " in tree index InsertUpdateDelete operator");
+ }
+
+ }
+
+ } catch (Exception e) {
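+ // per-tuple failures are currently only printed and do not abort processing of the frame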
+ e.printStackTrace();
+ }
+ }
+
+ // pass a copy of the frame to next op
+ System.arraycopy(buffer.array(), 0, writeBuffer.array(), 0, buffer.capacity());
+ FrameUtils.flushFrame(writeBuffer, writer);
+ }
+
+ @Override
+ public void close() throws HyracksDataException {
+ try {
+ writer.close();
+ } finally {
+ treeIndexOpHelper.deinit();
+ }
+ }
+
+ @Override
+ public void flush() throws HyracksDataException {
+ }
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexOpHelper.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexOpHelper.java
new file mode 100644
index 0000000..fca6d8d
--- /dev/null
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexOpHelper.java
@@ -0,0 +1,176 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+
+public abstract class TreeIndexOpHelper {
+
+ protected ITreeIndexFrame interiorFrame;
+ protected ITreeIndexFrame leafFrame;
+ protected MultiComparator cmp;
+
+ protected ITreeIndex treeIndex;
+ protected int indexFileId = -1;
+ protected int partition;
+
+ protected ITreeIndexOperatorDescriptorHelper opDesc;
+ protected IHyracksStageletContext ctx;
+
+ protected IndexHelperOpenMode mode;
+
+ public TreeIndexOpHelper(ITreeIndexOperatorDescriptorHelper opDesc, final IHyracksStageletContext ctx,
+ int partition, IndexHelperOpenMode mode) {
+ this.opDesc = opDesc;
+ this.ctx = ctx;
+ this.mode = mode;
+ this.partition = partition;
+ }
+
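+ // maps/opens the index file in the buffer cache, creates the interior and leaf frames,
+ // and looks up the tree index in the registry, creating and registering it if necessary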
+ public void init() throws HyracksDataException {
+ IBufferCache bufferCache = opDesc.getStorageManager().getBufferCache(ctx);
+ IFileMapProvider fileMapProvider = opDesc.getStorageManager().getFileMapProvider(ctx);
+ IFileSplitProvider fileSplitProvider = opDesc.getTreeIndexFileSplitProvider();
+
+ FileReference f = fileSplitProvider.getFileSplits()[partition].getLocalFile();
+ boolean fileIsMapped = fileMapProvider.isMapped(f);
+
+ switch (mode) {
+
+ case OPEN: {
+ if (!fileIsMapped) {
+ throw new HyracksDataException("Trying to open tree index from unmapped file " + f.toString());
+ }
+ }
+ break;
+
+ case CREATE:
+ case ENLIST: {
+ if (!fileIsMapped) {
+ bufferCache.createFile(f);
+ }
+ }
+ break;
+
+ }
+
+ int fileId = fileMapProvider.lookupFileId(f);
+ try {
+ bufferCache.openFile(fileId);
+ } catch (HyracksDataException e) {
+ // revert state of buffer cache since file failed to open
+ if (!fileIsMapped) {
+ bufferCache.deleteFile(fileId);
+ }
+ throw e;
+ }
+
+ // only set indexFileId member when openFile() succeeds,
+ // otherwise deinit() will try to close the file that failed to open
+ indexFileId = fileId;
+
+ interiorFrame = opDesc.getTreeIndexInteriorFactory().createFrame();
+ leafFrame = opDesc.getTreeIndexLeafFactory().createFrame();
+
+ IndexRegistry<ITreeIndex> treeIndexRegistry = opDesc.getTreeIndexRegistryProvider().getRegistry(ctx);
+ treeIndex = treeIndexRegistry.get(indexFileId);
+ if (treeIndex == null) {
+
+ // create new tree and register it
+ treeIndexRegistry.lock();
+ try {
+ // check if tree has already been registered by another thread
+ treeIndex = treeIndexRegistry.get(indexFileId);
+ if (treeIndex == null) {
+ // this thread should create and register the tree
+
+ IBinaryComparator[] comparators = new IBinaryComparator[opDesc.getTreeIndexComparatorFactories().length];
+ for (int i = 0; i < opDesc.getTreeIndexComparatorFactories().length; i++) {
+ comparators[i] = opDesc.getTreeIndexComparatorFactories()[i].createBinaryComparator();
+ }
+
+ cmp = new MultiComparator(opDesc.getTreeIndexTypeTraits(), comparators);
+
+ treeIndex = createTreeIndex();
+ if (mode == IndexHelperOpenMode.CREATE) {
+ ITreeIndexMetaDataFrame metaFrame = treeIndex.getFreePageManager().getMetaDataFrameFactory()
+ .createFrame();
+ try {
+ treeIndex.create(indexFileId, leafFrame, metaFrame);
+ } catch (Exception e) {
+ throw new HyracksDataException(e);
+ }
+ }
+ treeIndex.open(indexFileId);
+ treeIndexRegistry.register(indexFileId, treeIndex);
+ }
+ } finally {
+ treeIndexRegistry.unlock();
+ }
+ }
+ }
+
+ // MUST be overridden by concrete op helpers
+ public ITreeIndex createTreeIndex() throws HyracksDataException {
+ throw new HyracksDataException("createTreeIndex operation not implemented.");
+ }
+
+ public ITreeIndexCursor createDiskOrderScanCursor(ITreeIndexFrame leafFrame) throws HyracksDataException {
+ return new TreeDiskOrderScanCursor(leafFrame);
+ }
+
+ public void deinit() throws HyracksDataException {
+ if (indexFileId != -1) {
+ IBufferCache bufferCache = opDesc.getStorageManager().getBufferCache(ctx);
+ bufferCache.closeFile(indexFileId);
+ }
+ }
+
+ public ITreeIndex getTreeIndex() {
+ return treeIndex;
+ }
+
+ public IHyracksStageletContext getHyracksStageletContext() {
+ return ctx;
+ }
+
+ public ITreeIndexOperatorDescriptorHelper getOperatorDescriptor() {
+ return opDesc;
+ }
+
+ public ITreeIndexFrame getLeafFrame() {
+ return leafFrame;
+ }
+
+ public ITreeIndexFrame getInteriorFrame() {
+ return interiorFrame;
+ }
+
+ public int getIndexFileId() {
+ return indexFileId;
+ }
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorDescriptor.java
new file mode 100644
index 0000000..574e727
--- /dev/null
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorDescriptor.java
@@ -0,0 +1,33 @@
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.api.job.IOperatorEnvironment;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
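+// Gathers statistics over an existing tree index and reports them; the operator has no inputs and no outputs.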
+public class TreeIndexStatsOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
+
+ private static final long serialVersionUID = 1L;
+
+ public TreeIndexStatsOperatorDescriptor(JobSpecification spec, IStorageManagerInterface storageManager,
+ IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider, IFileSplitProvider fileSplitProvider,
+ ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory leafFrameFactory,
+ ITypeTrait[] typeTraits, IBinaryComparatorFactory[] comparatorFactories,
+ ITreeIndexOpHelperFactory opHelperFactory) {
+ super(spec, 0, 0, null, storageManager, treeIndexRegistryProvider, fileSplitProvider, interiorFrameFactory,
+ leafFrameFactory, typeTraits, comparatorFactories, opHelperFactory);
+ }
+
+ @Override
+ public IOperatorNodePushable createPushRuntime(IHyracksStageletContext ctx, IOperatorEnvironment env,
+ IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
+ return new TreeIndexStatsOperatorNodePushable(this, ctx, partition);
+ }
+}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorNodePushable.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorNodePushable.java
new file mode 100644
index 0000000..f47855f
--- /dev/null
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorNodePushable.java
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.storage.am.common.dataflow;
+
+import edu.uci.ics.hyracks.api.comm.IFrameWriter;
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractOperatorNodePushable;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.utility.TreeIndexStats;
+import edu.uci.ics.hyracks.storage.am.common.utility.TreeIndexStatsGatherer;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+
+public class TreeIndexStatsOperatorNodePushable extends AbstractOperatorNodePushable {
+ private final TreeIndexOpHelper treeIndexOpHelper;
+ private final IHyracksStageletContext ctx;
+ private TreeIndexStatsGatherer statsGatherer;
+
+ public TreeIndexStatsOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc, IHyracksStageletContext ctx,
+ int partition) {
+ treeIndexOpHelper = opDesc.getTreeIndexOpHelperFactory().createTreeIndexOpHelper(opDesc, ctx, partition,
+ IndexHelperOpenMode.CREATE);
+ this.ctx = ctx;
+ }
+
+ @Override
+ public void deinitialize() throws HyracksDataException {
+ }
+
+ @Override
+ public int getInputArity() {
+ return 0;
+ }
+
+ @Override
+ public IFrameWriter getInputFrameWriter(int index) {
+ return null;
+ }
+
+ @Override
+ public void initialize() throws HyracksDataException {
+ try {
+ treeIndexOpHelper.init();
+ treeIndexOpHelper.getTreeIndex().open(treeIndexOpHelper.getIndexFileId());
+
+ ITreeIndex treeIndex = treeIndexOpHelper.getTreeIndex();
+ IBufferCache bufferCache = treeIndexOpHelper.getOperatorDescriptor().getStorageManager()
+ .getBufferCache(ctx);
+
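+ // walk the index pages, gather statistics, and report them (currently printed to stderr)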
+ statsGatherer = new TreeIndexStatsGatherer(bufferCache, treeIndex.getFreePageManager(),
+ treeIndexOpHelper.getIndexFileId(), treeIndex.getRootPageId());
+ TreeIndexStats stats = statsGatherer.gatherStats(treeIndex.getLeafFrameFactory().createFrame(), treeIndex
+ .getInteriorFrameFactory().createFrame(), treeIndex.getFreePageManager().getMetaDataFrameFactory()
+ .createFrame());
+ System.err.println(stats.toString());
+ } catch (Exception e) {
+ treeIndexOpHelper.deinit();
+ throw new HyracksDataException(e);
+ }
+ }
+
+ @Override
+ public void setOutputFrameWriter(int index, IFrameWriter writer, RecordDescriptor recordDesc) {
+ }
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/AbstractSlotManager.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/AbstractSlotManager.java
new file mode 100644
index 0000000..87fea47
--- /dev/null
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/AbstractSlotManager.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common.frames;
+
+import edu.uci.ics.hyracks.storage.am.common.api.ISlotManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+
+public abstract class AbstractSlotManager implements ISlotManager {
+
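+ // Slots are 4-byte tuple offsets stored at the end of the frame buffer, growing backwards from the last 4 bytes toward the free space.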
+ protected static final int slotSize = 4;
+ protected ITreeIndexFrame frame;
+
+ @Override
+ public int getTupleOff(int offset) {
+ return frame.getBuffer().getInt(offset);
+ }
+
+ @Override
+ public void setSlot(int offset, int value) {
+ frame.getBuffer().putInt(offset, value);
+ }
+
+ @Override
+ public int getSlotEndOff() {
+ return frame.getBuffer().capacity() - (frame.getTupleCount() * slotSize);
+ }
+
+ @Override
+ public int getSlotStartOff() {
+ return frame.getBuffer().capacity() - slotSize;
+ }
+
+ @Override
+ public int getSlotSize() {
+ return slotSize;
+ }
+
+ @Override
+ public void setFrame(ITreeIndexFrame frame) {
+ this.frame = frame;
+ }
+
+ @Override
+ public int getSlotOff(int tupleIndex) {
+ return getSlotStartOff() - tupleIndex * slotSize;
+ }
+}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/SpaceStatus.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/FrameOpSpaceStatus.java
similarity index 88%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/SpaceStatus.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/FrameOpSpaceStatus.java
index 9ad15ee..97a4730 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/SpaceStatus.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/FrameOpSpaceStatus.java
@@ -13,8 +13,8 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.impls;
+package edu.uci.ics.hyracks.storage.am.common.frames;
-public enum SpaceStatus {
+public enum FrameOpSpaceStatus {
INSUFFICIENT_SPACE, SUFFICIENT_CONTIGUOUS_SPACE, SUFFICIENT_SPACE
}
\ No newline at end of file
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/MetaDataFrame.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/LIFOMetaDataFrame.java
similarity index 89%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/MetaDataFrame.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/LIFOMetaDataFrame.java
index a219dd6..b621b89 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/MetaDataFrame.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/LIFOMetaDataFrame.java
@@ -13,11 +13,11 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.frames;
+package edu.uci.ics.hyracks.storage.am.common.frames;
import java.nio.ByteBuffer;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
// all meta pages of this kind have a negative level
@@ -25,13 +25,14 @@
// the first meta page is special because it guarantees to have a correct max page
// other meta pages (i.e., with level -2) have junk in the max page field
-public class MetaDataFrame implements IBTreeMetaDataFrame {
+public class LIFOMetaDataFrame implements ITreeIndexMetaDataFrame {
protected static final int tupleCountOff = 0;
protected static final int freeSpaceOff = tupleCountOff + 4;
protected static final int maxPageOff = freeSpaceOff + 4;
- protected static final byte levelOff = maxPageOff + 1;
- protected static final byte nextPageOff = maxPageOff + 8;
+ protected static final int dummyFieldOff = maxPageOff + 4;
+ protected static final byte levelOff = dummyFieldOff + 4;
+ protected static final byte nextPageOff = levelOff + 1;
protected ICachedPage page = null;
protected ByteBuffer buf = null;
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/MetaDataFrameFactory.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/LIFOMetaDataFrameFactory.java
similarity index 63%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/MetaDataFrameFactory.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/LIFOMetaDataFrameFactory.java
index 9896bf9..409c8b2 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/MetaDataFrameFactory.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/LIFOMetaDataFrameFactory.java
@@ -13,14 +13,14 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.frames;
+package edu.uci.ics.hyracks.storage.am.common.frames;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeMetaDataFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
-public class MetaDataFrameFactory implements IBTreeMetaDataFrameFactory {
+public class LIFOMetaDataFrameFactory implements ITreeIndexMetaDataFrameFactory {
@Override
- public IBTreeMetaDataFrame getFrame() {
- return new MetaDataFrame();
+ public ITreeIndexMetaDataFrame createFrame() {
+ return new LIFOMetaDataFrame();
}
}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMFrame.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/TreeIndexNSMFrame.java
similarity index 76%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMFrame.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/TreeIndexNSMFrame.java
index 2324eab..af1e337 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/NSMFrame.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/TreeIndexNSMFrame.java
@@ -13,7 +13,7 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.frames;
+package edu.uci.ics.hyracks.storage.am.common.frames;
import java.io.ByteArrayInputStream;
import java.io.DataInput;
@@ -25,20 +25,18 @@
import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleWriter;
-import edu.uci.ics.hyracks.storage.am.btree.api.ISlotManager;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeException;
-import edu.uci.ics.hyracks.storage.am.btree.impls.FindTupleMode;
-import edu.uci.ics.hyracks.storage.am.btree.impls.FindTupleNoExactMatchPolicy;
-import edu.uci.ics.hyracks.storage.am.btree.impls.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.btree.impls.OrderedSlotManager;
-import edu.uci.ics.hyracks.storage.am.btree.impls.SlotOffTupleOff;
-import edu.uci.ics.hyracks.storage.am.btree.impls.SpaceStatus;
+import edu.uci.ics.hyracks.storage.am.common.api.ISlotManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.SlotOffTupleOff;
import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-public abstract class NSMFrame implements IBTreeFrame {
+public abstract class TreeIndexNSMFrame implements ITreeIndexFrame {
protected static final int pageLsnOff = 0; // 0
protected static final int tupleCountOff = pageLsnOff + 4; // 4
@@ -51,13 +49,13 @@
protected ByteBuffer buf = null;
protected ISlotManager slotManager;
- protected IBTreeTupleWriter tupleWriter;
- protected IBTreeTupleReference frameTuple;
+ protected ITreeIndexTupleWriter tupleWriter;
+ protected ITreeIndexTupleReference frameTuple;
- public NSMFrame(IBTreeTupleWriter tupleWriter) {
+ public TreeIndexNSMFrame(ITreeIndexTupleWriter tupleWriter, ISlotManager slotManager) {
this.tupleWriter = tupleWriter;
this.frameTuple = tupleWriter.createTupleReference();
- this.slotManager = new OrderedSlotManager();
+ this.slotManager = slotManager;
}
@Override
@@ -76,6 +74,11 @@
}
@Override
+ public boolean isInterior() {
+ return buf.get(levelOff) > 0;
+ }
+
+ @Override
public byte getLevel() {
return buf.get(levelOff);
}
@@ -126,7 +129,7 @@
}
@Override
- public void compact(MultiComparator cmp) {
+ public boolean compact(MultiComparator cmp) {
resetSpaceParams();
frameTuple.setFieldCount(cmp.getFieldCount());
@@ -144,7 +147,7 @@
for (int i = 0; i < sortedTupleOffs.size(); i++) {
int tupleOff = sortedTupleOffs.get(i).tupleOff;
- frameTuple.resetByOffset(buf, tupleOff);
+ frameTuple.resetByTupleOffset(buf, tupleOff);
int tupleEndOff = frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
+ frameTuple.getFieldLength(frameTuple.getFieldCount() - 1);
@@ -157,33 +160,36 @@
buf.putInt(freeSpaceOff, freeSpace);
buf.putInt(totalFreeSpaceOff, buf.capacity() - freeSpace - tupleCount * slotManager.getSlotSize());
+
+ return false;
}
@Override
public void delete(ITupleReference tuple, MultiComparator cmp, boolean exactDelete) throws Exception {
+
frameTuple.setFieldCount(cmp.getFieldCount());
int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_EXACT,
FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
int slotOff = slotManager.getSlotOff(tupleIndex);
if (tupleIndex < 0) {
- throw new BTreeException("Key to be deleted does not exist.");
+ throw new TreeIndexException("Key to be deleted does not exist.");
} else {
if (exactDelete) {
// check the non-key columns for equality by byte-by-byte
// comparison
int tupleOff = slotManager.getTupleOff(slotOff);
- frameTuple.resetByOffset(buf, tupleOff);
+ frameTuple.resetByTupleOffset(buf, tupleOff);
- int comparison = cmp.fieldRangeCompare(tuple, frameTuple, cmp.getKeyFieldCount() - 1, cmp
- .getFieldCount()
- - cmp.getKeyFieldCount());
+ int comparison = cmp.fieldRangeCompare(tuple, frameTuple, cmp.getKeyFieldCount() - 1,
+ cmp.getFieldCount() - cmp.getKeyFieldCount());
if (comparison != 0) {
- throw new BTreeException("Cannot delete tuple. Byte-by-byte comparison failed to prove equality.");
+ throw new TreeIndexException(
+ "Cannot delete tuple. Byte-by-byte comparison failed to prove equality.");
}
}
int tupleOff = slotManager.getTupleOff(slotOff);
- frameTuple.resetByOffset(buf, tupleOff);
+ frameTuple.resetByTupleOffset(buf, tupleOff);
int tupleSize = tupleWriter.bytesRequired(frameTuple);
// perform deletion (we just do a memcpy to overwrite the slot)
@@ -198,36 +204,39 @@
}
@Override
- public SpaceStatus hasSpaceInsert(ITupleReference tuple, MultiComparator cmp) {
+ public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple, MultiComparator cmp) {
int bytesRequired = tupleWriter.bytesRequired(tuple);
if (bytesRequired + slotManager.getSlotSize() <= buf.capacity() - buf.getInt(freeSpaceOff)
- (buf.getInt(tupleCountOff) * slotManager.getSlotSize()))
- return SpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
+ return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
else if (bytesRequired + slotManager.getSlotSize() <= buf.getInt(totalFreeSpaceOff))
- return SpaceStatus.SUFFICIENT_SPACE;
+ return FrameOpSpaceStatus.SUFFICIENT_SPACE;
else
- return SpaceStatus.INSUFFICIENT_SPACE;
+ return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
}
@Override
- public SpaceStatus hasSpaceUpdate(int rid, ITupleReference tuple, MultiComparator cmp) {
+ public FrameOpSpaceStatus hasSpaceUpdate(int rid, ITupleReference tuple, MultiComparator cmp) {
// TODO Auto-generated method stub
- return SpaceStatus.INSUFFICIENT_SPACE;
+ return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
}
protected void resetSpaceParams() {
- buf.putInt(freeSpaceOff, totalFreeSpaceOff + 4);
- buf.putInt(totalFreeSpaceOff, buf.capacity() - (totalFreeSpaceOff + 4));
+ buf.putInt(freeSpaceOff, smFlagOff + 1);
+ buf.putInt(totalFreeSpaceOff, buf.capacity() - (smFlagOff + 1));
}
@Override
- public void insert(ITupleReference tuple, MultiComparator cmp) throws Exception {
+ public int findTupleIndex(ITupleReference tuple, MultiComparator cmp) throws Exception {
frameTuple.setFieldCount(cmp.getFieldCount());
- int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_INCLUSIVE,
+ return slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_INCLUSIVE,
FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
+ }
+
+ @Override
+ public void insert(ITupleReference tuple, MultiComparator cmp, int tupleIndex) throws Exception {
slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
int bytesWritten = tupleWriter.writeTuple(tuple, buf, buf.getInt(freeSpaceOff));
-
buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
@@ -262,8 +271,8 @@
for (int i = 0; i < tupleCount; i++) {
frameTuple.resetByTupleIndex(this, i);
for (int j = 0; j < cmp.getKeyFieldCount(); j++) {
- ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(j), frameTuple
- .getFieldStart(j), frameTuple.getFieldLength(j));
+ ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(j),
+ frameTuple.getFieldStart(j), frameTuple.getFieldLength(j));
DataInput dataIn = new DataInputStream(inStream);
Object o = fields[j].deserialize(dataIn);
strBuilder.append(o.toString() + " ");
@@ -309,7 +318,7 @@
frameTuple.setFieldCount(fieldCount);
}
- public IBTreeTupleWriter getTupleWriter() {
+ public ITreeIndexTupleWriter getTupleWriter() {
return tupleWriter;
}
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/freepage/LinkedListFreePageManager.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/freepage/LinkedListFreePageManager.java
new file mode 100644
index 0000000..42bf70f
--- /dev/null
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/freepage/LinkedListFreePageManager.java
@@ -0,0 +1,187 @@
+package edu.uci.ics.hyracks.storage.am.common.freepage;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
+
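+// Manages free pages via a linked list of meta-data pages rooted at headPage: freed pages are
+// recorded on the head meta page (chaining a new meta page when it fills up) and are handed out
+// again by getFreePage() before the file is grown via the max-page counter.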
+public class LinkedListFreePageManager implements IFreePageManager {
+
+ private static final byte META_PAGE_LEVEL_INDICATOR = -1;
+ private static final byte FREE_PAGE_LEVEL_INDICATOR = -2;
+ private final IBufferCache bufferCache;
+ private final int fileId;
+ private final int headPage;
+ private final ITreeIndexMetaDataFrameFactory metaDataFrameFactory;
+
+ public LinkedListFreePageManager(IBufferCache bufferCache, int fileId, int headPage,
+ ITreeIndexMetaDataFrameFactory metaDataFrameFactory) {
+ this.bufferCache = bufferCache;
+ this.fileId = fileId;
+ this.headPage = headPage;
+ this.metaDataFrameFactory = metaDataFrameFactory;
+ }
+
+ @Override
+ public void addFreePage(ITreeIndexMetaDataFrame metaFrame, int freePage) throws HyracksDataException {
+
+ ICachedPage metaNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, headPage), false);
+ metaNode.acquireWriteLatch();
+
+ try {
+ metaFrame.setPage(metaNode);
+
+ if (metaFrame.hasSpace()) {
+ metaFrame.addFreePage(freePage);
+ } else {
+ // allocate a new page in the chain of meta pages
+ int newPage = metaFrame.getFreePage();
+ if (newPage < 0) {
+ throw new Exception("Inconsistent meta page state: the page has no space left, but it also has no free-page entries.");
+ }
+
+ ICachedPage newNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, newPage), false);
+ newNode.acquireWriteLatch();
+
+ try {
+ int metaMaxPage = metaFrame.getMaxPage();
+
+ // copy metaDataPage to newNode
+ System.arraycopy(metaNode.getBuffer().array(), 0, newNode.getBuffer().array(), 0, metaNode
+ .getBuffer().capacity());
+
+ metaFrame.initBuffer(META_PAGE_LEVEL_INDICATOR);
+ metaFrame.setNextPage(newPage);
+ metaFrame.setMaxPage(metaMaxPage);
+ metaFrame.addFreePage(freePage);
+ } finally {
+ newNode.releaseWriteLatch();
+ bufferCache.unpin(newNode);
+ }
+ }
+ } catch (Exception e) {
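+ // failures while extending the meta-page chain are currently only printed, not propagated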
+ e.printStackTrace();
+ } finally {
+ metaNode.releaseWriteLatch();
+ bufferCache.unpin(metaNode);
+ }
+ }
+
+ @Override
+ public int getFreePage(ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
+ ICachedPage metaNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, headPage), false);
+
+ metaNode.acquireWriteLatch();
+
+ int freePage = -1;
+ try {
+ metaFrame.setPage(metaNode);
+ freePage = metaFrame.getFreePage();
+ if (freePage < 0) { // no free page entry on this page
+ int nextPage = metaFrame.getNextPage();
+ if (nextPage > 0) { // sibling may have free pages
+ ICachedPage nextNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, nextPage), false);
+
+ nextNode.acquireWriteLatch();
+ // copy the free-page entries of nextPage into the first
+ // meta page (the head page), and link the head page to
+ // the successor of nextPage
+ try {
+ // remember entries that remain unchanged
+ int maxPage = metaFrame.getMaxPage();
+
+ // copy entire page (including sibling pointer, free
+ // page entries, and all other info)
+ // after this copy nextPage is considered a free page
+ System.arraycopy(nextNode.getBuffer().array(), 0, metaNode.getBuffer().array(), 0, nextNode
+ .getBuffer().capacity());
+
+ // reset unchanged entry
+ metaFrame.setMaxPage(maxPage);
+
+ freePage = metaFrame.getFreePage();
+ // the sibling also has no free pages; this "should" not
+ // happen, but we handle it anyway just to be safe
+ if (freePage < 0) {
+ freePage = nextPage;
+ } else {
+ metaFrame.addFreePage(nextPage);
+ }
+ } finally {
+ nextNode.releaseWriteLatch();
+ bufferCache.unpin(nextNode);
+ }
+ } else {
+ freePage = metaFrame.getMaxPage();
+ freePage++;
+ metaFrame.setMaxPage(freePage);
+ }
+ }
+ } finally {
+ metaNode.releaseWriteLatch();
+ bufferCache.unpin(metaNode);
+ }
+
+ return freePage;
+ }
+
+ @Override
+ public int getMaxPage(ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
+ ICachedPage metaNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, headPage), false);
+ metaNode.acquireWriteLatch();
+ int maxPage = -1;
+ try {
+ metaFrame.setPage(metaNode);
+ maxPage = metaFrame.getMaxPage();
+ } finally {
+ metaNode.releaseWriteLatch();
+ bufferCache.unpin(metaNode);
+ }
+ return maxPage;
+ }
+
+ @Override
+ public void init(ITreeIndexMetaDataFrame metaFrame, int currentMaxPage) throws HyracksDataException {
+ // initialize meta data page
+ ICachedPage metaNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, headPage), true);
+
+ metaNode.acquireWriteLatch();
+ try {
+ metaFrame.setPage(metaNode);
+ metaFrame.initBuffer(META_PAGE_LEVEL_INDICATOR);
+ metaFrame.setMaxPage(currentMaxPage);
+ } finally {
+ metaNode.releaseWriteLatch();
+ bufferCache.unpin(metaNode);
+ }
+ }
+
+ @Override
+ public ITreeIndexMetaDataFrameFactory getMetaDataFrameFactory() {
+ return metaDataFrameFactory;
+ }
+
+ @Override
+ public byte getFreePageLevelIndicator() {
+ return FREE_PAGE_LEVEL_INDICATOR;
+ }
+
+ @Override
+ public byte getMetaPageLevelIndicator() {
+ return META_PAGE_LEVEL_INDICATOR;
+ }
+
+ @Override
+ public boolean isFreePage(ITreeIndexMetaDataFrame metaFrame) {
+ return metaFrame.getLevel() == FREE_PAGE_LEVEL_INDICATOR;
+ }
+
+ @Override
+ public boolean isMetaPage(ITreeIndexMetaDataFrame metaFrame) {
+ return metaFrame.getLevel() == META_PAGE_LEVEL_INDICATOR;
+ }
+}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/DiskOrderScanCursor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/TreeDiskOrderScanCursor.java
similarity index 73%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/DiskOrderScanCursor.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/TreeDiskOrderScanCursor.java
index 29086ba..2c2cb5e 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/DiskOrderScanCursor.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/TreeDiskOrderScanCursor.java
@@ -13,34 +13,32 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.impls;
+package edu.uci.ics.hyracks.storage.am.common.impls;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeCursor;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.ISearchPredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
+import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
-public class DiskOrderScanCursor implements IBTreeCursor {
-
- // TODO: might want to return tuples in physical order, not logical order to
- // speed up access
+public class TreeDiskOrderScanCursor implements ITreeIndexCursor {
private int tupleIndex = 0;
private int fileId = -1;
int currentPageId = -1;
- int maxPageId = -1; // TODO: figure out how to scan to the end of file, this
- // is dirty and may not with concurrent updates
+ int maxPageId = -1;
private ICachedPage page = null;
- private IBTreeLeafFrame frame = null;
+ private ITreeIndexFrame frame = null;
private IBufferCache bufferCache = null;
- private IBTreeTupleReference frameTuple;
+ private ITreeIndexTupleReference frameTuple;
- public DiskOrderScanCursor(IBTreeLeafFrame frame) {
+ public TreeDiskOrderScanCursor(ITreeIndexFrame frame) {
this.frame = frame;
this.frameTuple = frame.getTupleWriter().createTupleReference();
}
@@ -53,7 +51,7 @@
}
@Override
- public IBTreeTupleReference getTuple() {
+ public ITreeIndexTupleReference getTuple() {
return frameTuple;
}
@@ -105,23 +103,22 @@
}
@Override
- public void open(ICachedPage page, ISearchPredicate searchPred) throws HyracksDataException {
+ public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
// in case open is called multiple times without closing
- if (this.page != null) {
- this.page.releaseReadLatch();
- bufferCache.unpin(this.page);
+ if (page != null) {
+ page.releaseReadLatch();
+ bufferCache.unpin(page);
}
- this.page = page;
+ page = initialState.getPage();
tupleIndex = 0;
frame.setPage(page);
- RangePredicate pred = (RangePredicate) searchPred;
- MultiComparator lowKeyCmp = pred.getLowKeyComparator();
+ MultiComparator lowKeyCmp = searchPred.getLowKeyComparator();
frameTuple.setFieldCount(lowKeyCmp.getFieldCount());
boolean leafExists = positionToNextLeaf(false);
if (!leafExists) {
throw new HyracksDataException(
- "Failed to open disk-order scan cursor for B-tree. Traget B-tree has no leaves.");
+ "Failed to open disk-order scan cursor for tree index. Target tree index has no leaves.");
}
}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FindTupleMode.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FindTupleMode.java
similarity index 92%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FindTupleMode.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FindTupleMode.java
index 9e2fa75..cea2500 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FindTupleMode.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FindTupleMode.java
@@ -13,7 +13,7 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.impls;
+package edu.uci.ics.hyracks.storage.am.common.ophelpers;
public enum FindTupleMode {
FTM_INCLUSIVE, FTM_EXCLUSIVE, FTM_EXACT
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FindTupleNoExactMatchPolicy.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FindTupleNoExactMatchPolicy.java
similarity index 92%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FindTupleNoExactMatchPolicy.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FindTupleNoExactMatchPolicy.java
index 1bc6ea3..0d534ed 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FindTupleNoExactMatchPolicy.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/FindTupleNoExactMatchPolicy.java
@@ -13,7 +13,7 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.impls;
+package edu.uci.ics.hyracks.storage.am.common.ophelpers;
public enum FindTupleNoExactMatchPolicy {
FTP_LOWER_KEY, FTP_HIGHER_KEY
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOp.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IndexOp.java
similarity index 82%
copy from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOp.java
copy to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IndexOp.java
index d698c8a..e40c5c8 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOp.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IndexOp.java
@@ -13,8 +13,8 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.impls;
+package edu.uci.ics.hyracks.storage.am.common.ophelpers;
-public enum BTreeOp {
- BTO_INSERT, BTO_DELETE, BTO_UPDATE, BTO_SEARCH
+public enum IndexOp {
+ INSERT, DELETE, UPDATE, SEARCH, DISKORDERSCAN
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IndexOpContext.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IndexOpContext.java
new file mode 100644
index 0000000..4f6e656
--- /dev/null
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IndexOpContext.java
@@ -0,0 +1,5 @@
+package edu.uci.ics.hyracks.storage.am.common.ophelpers;
+
+public interface IndexOpContext {
+ void reset();
+}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/IntArrayList.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IntArrayList.java
similarity index 76%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/IntArrayList.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IntArrayList.java
index 2d0b9df..46551dd 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/IntArrayList.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IntArrayList.java
@@ -13,16 +13,18 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.impls;
+package edu.uci.ics.hyracks.storage.am.common.ophelpers;
public class IntArrayList {
private int[] data;
private int size;
+ private int first;
private final int growth;
public IntArrayList(int initialCapacity, int growth) {
data = new int[initialCapacity];
size = 0;
+ first = 0;
this.growth = growth;
}
@@ -30,6 +32,10 @@
return size;
}
+ public int first() {
+ return first;
+ }
+
public void add(int i) {
if (size == data.length) {
int[] newData = new int[data.length + growth];
@@ -54,8 +60,27 @@
return data[i];
}
+ // WARNING: caller is responsible for checking i < size
+ public void set(int i, int value) {
+ data[i] = value;
+ }
+
+ public int getFirst() {
+ return data[first];
+ }
+
+ public void moveFirst() {
+ first++;
+ }
+
public void clear() {
size = 0;
+ first = 0;
+ }
+
+ public boolean isLast() {
+ return size == first;
}
public boolean isEmpty() {
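
The new first/getFirst/moveFirst/isLast methods turn IntArrayList into an append-only queue with a moving head. A brief usage sketch, relying only on the methods shown in this diff; the printing is a stand-in for real work.

import edu.uci.ics.hyracks.storage.am.common.ophelpers.IntArrayList;

public class IntArrayListQueueDemo {
    // Drains the list queue-style: moveFirst() only advances the head,
    // storage is reclaimed by clear().
    static void drain(IntArrayList pageIds) {
        while (!pageIds.isLast()) {
            int pageId = pageIds.getFirst(); // peek at the current head
            System.out.println(pageId);      // stand-in for real work
            pageIds.moveFirst();             // advance the head
        }
        pageIds.clear(); // resets both size and first
    }

    public static void main(String[] args) {
        IntArrayList pageIds = new IntArrayList(100, 100);
        pageIds.add(3);
        pageIds.add(7);
        pageIds.add(11);
        drain(pageIds); // prints 3, 7, 11
    }
}
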
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/MultiComparator.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/MultiComparator.java
similarity index 83%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/MultiComparator.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/MultiComparator.java
index e029af0..1842bf8 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/MultiComparator.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/MultiComparator.java
@@ -13,7 +13,7 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.impls;
+package edu.uci.ics.hyracks.storage.am.common.ophelpers;
import java.io.ByteArrayInputStream;
import java.io.DataInput;
@@ -24,8 +24,8 @@
import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
-@SuppressWarnings("unchecked")
public class MultiComparator {
private static final long serialVersionUID = 1L;
@@ -33,6 +33,12 @@
private IBinaryComparator[] cmps = null;
private ITypeTrait[] typeTraits;
+ private IBinaryComparator intCmp = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+
+ public IBinaryComparator getIntCmp() {
+ return intCmp;
+ }
+
public MultiComparator(ITypeTrait[] typeTraits, IBinaryComparator[] cmps) {
this.typeTraits = typeTraits;
this.cmps = cmps;
@@ -40,8 +46,8 @@
public int compare(ITupleReference tupleA, ITupleReference tupleB) {
for (int i = 0; i < cmps.length; i++) {
- int cmp = cmps[i].compare(tupleA.getFieldData(i), tupleA.getFieldStart(i), tupleA.getFieldLength(i), tupleB
- .getFieldData(i), tupleB.getFieldStart(i), tupleB.getFieldLength(i));
+ int cmp = cmps[i].compare(tupleA.getFieldData(i), tupleA.getFieldStart(i), tupleA.getFieldLength(i),
+ tupleB.getFieldData(i), tupleB.getFieldStart(i), tupleB.getFieldLength(i));
if (cmp < 0)
return -1;
else if (cmp > 0)
@@ -52,8 +58,8 @@
public int fieldRangeCompare(ITupleReference tupleA, ITupleReference tupleB, int startFieldIndex, int numFields) {
for (int i = startFieldIndex; i < startFieldIndex + numFields; i++) {
- int cmp = cmps[i].compare(tupleA.getFieldData(i), tupleA.getFieldStart(i), tupleA.getFieldLength(i), tupleB
- .getFieldData(i), tupleB.getFieldStart(i), tupleB.getFieldLength(i));
+ int cmp = cmps[i].compare(tupleA.getFieldData(i), tupleA.getFieldStart(i), tupleA.getFieldLength(i),
+ tupleB.getFieldData(i), tupleB.getFieldStart(i), tupleB.getFieldLength(i));
if (cmp < 0)
return -1;
else if (cmp > 0)
@@ -86,10 +92,6 @@
this.cmps = cmps;
}
- public int size() {
- return cmps.length;
- }
-
public int getFieldCount() {
return typeTraits.length;
}
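
MultiComparator.compare above is a plain lexicographic comparison with an early exit on the first unequal field. The same control flow over plain int fields, as a standalone illustration with the Hyracks types left out:

public class LexCompareDemo {
    // Mirrors MultiComparator.compare(): walk the fields in order and return
    // on the first field that differs; equal prefixes fall through.
    static int lexCompare(int[] a, int[] b) {
        for (int i = 0; i < a.length; i++) {
            if (a[i] < b[i]) return -1;
            if (a[i] > b[i]) return 1;
        }
        return 0; // all compared fields are equal
    }

    public static void main(String[] args) {
        System.out.println(lexCompare(new int[] { 1, 5 }, new int[] { 1, 7 })); // -1
        System.out.println(lexCompare(new int[] { 2, 0 }, new int[] { 1, 9 })); // 1
    }
}
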
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/SlotOffTupleOff.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/SlotOffTupleOff.java
similarity index 94%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/SlotOffTupleOff.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/SlotOffTupleOff.java
index c074642..4fc1861 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/SlotOffTupleOff.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/SlotOffTupleOff.java
@@ -13,7 +13,7 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.impls;
+package edu.uci.ics.hyracks.storage.am.common.ophelpers;
public class SlotOffTupleOff implements Comparable<SlotOffTupleOff> {
public int tupleIndex;
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/SimpleTupleReference.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleReference.java
similarity index 70%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/SimpleTupleReference.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleReference.java
index d2b2892..f9e00ac 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/SimpleTupleReference.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleReference.java
@@ -13,14 +13,14 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.tuples;
+package edu.uci.ics.hyracks.storage.am.common.tuples;
import java.nio.ByteBuffer;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-public class SimpleTupleReference implements IBTreeTupleReference {
+public class SimpleTupleReference implements ITreeIndexTupleReference {
protected ByteBuffer buf;
protected int fieldStartIndex;
@@ -30,14 +30,14 @@
protected int fieldSlotsBytes;
@Override
- public void resetByOffset(ByteBuffer buf, int tupleStartOff) {
+ public void resetByTupleOffset(ByteBuffer buf, int tupleStartOff) {
this.buf = buf;
this.tupleStartOff = tupleStartOff;
}
@Override
- public void resetByTupleIndex(IBTreeFrame frame, int tupleIndex) {
- resetByOffset(frame.getBuffer(), frame.getTupleOffset(tupleIndex));
+ public void resetByTupleIndex(ITreeIndexFrame frame, int tupleIndex) {
+ resetByTupleOffset(frame.getBuffer(), frame.getTupleOffset(tupleIndex));
}
@Override
@@ -67,10 +67,10 @@
@Override
public int getFieldLength(int fIdx) {
if (fIdx == 0) {
- return buf.getInt(tupleStartOff + nullFlagsBytes);
+ return buf.getShort(tupleStartOff + nullFlagsBytes);
} else {
- return buf.getInt(tupleStartOff + nullFlagsBytes + fIdx * 4)
- - buf.getInt(tupleStartOff + nullFlagsBytes + ((fIdx - 1) * 4));
+ return buf.getShort(tupleStartOff + nullFlagsBytes + fIdx * 2)
+ - buf.getShort(tupleStartOff + nullFlagsBytes + ((fIdx - 1) * 2));
}
}
@@ -80,7 +80,7 @@
return tupleStartOff + nullFlagsBytes + fieldSlotsBytes;
} else {
return tupleStartOff + nullFlagsBytes + fieldSlotsBytes
- + buf.getInt(tupleStartOff + nullFlagsBytes + ((fIdx - 1) * 4));
+ + buf.getShort(tupleStartOff + nullFlagsBytes + ((fIdx - 1) * 2));
}
}
@@ -89,6 +89,6 @@
}
protected int getFieldSlotsBytes() {
- return fieldCount * 4;
+ return fieldCount * 2;
}
-}
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/SimpleTupleWriter.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriter.java
similarity index 74%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/SimpleTupleWriter.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriter.java
index 1eaa90d..1730c4a 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/SimpleTupleWriter.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriter.java
@@ -13,15 +13,15 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.tuples;
+package edu.uci.ics.hyracks.storage.am.common.tuples;
import java.nio.ByteBuffer;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-public class SimpleTupleWriter implements IBTreeTupleWriter {
+public class SimpleTupleWriter implements ITreeIndexTupleWriter {
@Override
public int bytesRequired(ITupleReference tuple) {
@@ -42,7 +42,7 @@
}
@Override
- public IBTreeTupleReference createTupleReference() {
+ public ITreeIndexTupleReference createTupleReference() {
return new SimpleTupleReference();
}
@@ -58,11 +58,11 @@
int fieldEndOff = 0;
for (int i = 0; i < tuple.getFieldCount(); i++) {
- System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner, tuple
- .getFieldLength(i));
+ System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner,
+ tuple.getFieldLength(i));
fieldEndOff += tuple.getFieldLength(i);
runner += tuple.getFieldLength(i);
- targetBuf.putInt(targetOff + nullFlagsBytes + i * 4, fieldEndOff);
+ targetBuf.putShort(targetOff + nullFlagsBytes + i * 2, (short) fieldEndOff);
}
return runner - targetOff;
@@ -81,30 +81,30 @@
int fieldEndOff = 0;
int fieldCounter = 0;
for (int i = startField; i < startField + numFields; i++) {
- System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner, tuple
- .getFieldLength(i));
+ System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner,
+ tuple.getFieldLength(i));
fieldEndOff += tuple.getFieldLength(i);
runner += tuple.getFieldLength(i);
- targetBuf.putInt(targetOff + nullFlagsBytes + fieldCounter * 4, fieldEndOff);
+ targetBuf.putShort(targetOff + nullFlagsBytes + fieldCounter * 2, (short) fieldEndOff);
fieldCounter++;
}
return runner - targetOff;
}
- private int getNullFlagsBytes(ITupleReference tuple) {
+ protected int getNullFlagsBytes(ITupleReference tuple) {
return (int) Math.ceil((double) tuple.getFieldCount() / 8.0);
}
- private int getFieldSlotsBytes(ITupleReference tuple) {
- return tuple.getFieldCount() * 4;
+ protected int getFieldSlotsBytes(ITupleReference tuple) {
+ return tuple.getFieldCount() * 2;
}
- private int getNullFlagsBytes(ITupleReference tuple, int startField, int numFields) {
+ protected int getNullFlagsBytes(ITupleReference tuple, int startField, int numFields) {
return (int) Math.ceil((double) numFields / 8.0);
}
- private int getFieldSlotsBytes(ITupleReference tuple, int startField, int numFields) {
- return numFields * 4;
+ protected int getFieldSlotsBytes(ITupleReference tuple, int startField, int numFields) {
+ return numFields * 2;
}
}
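
The SimpleTuple* changes above shrink each field slot from a 4-byte int to a 2-byte short holding the cumulative end offset of the field, so the layout implied by this diff is [null flags][one short per field][concatenated field data]. A self-contained sketch of writing and reading that layout; buffer size and field values are illustrative only.

import java.nio.ByteBuffer;

public class SimpleTupleLayoutDemo {
    public static void main(String[] args) {
        byte[][] fields = { "abc".getBytes(), "de".getBytes() };
        int fieldCount = fields.length;
        int nullFlagsBytes = (int) Math.ceil(fieldCount / 8.0); // 1
        int fieldSlotsBytes = fieldCount * 2;                   // 2 bytes per field

        ByteBuffer buf = ByteBuffer.allocate(64);
        int tupleStartOff = 0;
        int runner = tupleStartOff + nullFlagsBytes + fieldSlotsBytes;
        int fieldEndOff = 0;
        for (int i = 0; i < fieldCount; i++) {
            buf.position(runner);
            buf.put(fields[i]);
            runner += fields[i].length;
            fieldEndOff += fields[i].length;
            // cumulative end offset, relative to the start of the data area
            buf.putShort(tupleStartOff + nullFlagsBytes + i * 2, (short) fieldEndOff);
        }

        // Reading field 1 back: start = end(0), length = end(1) - end(0)
        int start = buf.getShort(tupleStartOff + nullFlagsBytes);           // 3
        int len = buf.getShort(tupleStartOff + nullFlagsBytes + 2) - start; // 2
        System.out.println("field 1 starts at data offset " + start + ", length " + len);
    }
}
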
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/SimpleTupleWriterFactory.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriterFactory.java
similarity index 68%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/SimpleTupleWriterFactory.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriterFactory.java
index 3b06153..ebb2905 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/SimpleTupleWriterFactory.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriterFactory.java
@@ -13,17 +13,17 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.tuples;
+package edu.uci.ics.hyracks.storage.am.common.tuples;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleWriter;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
-public class SimpleTupleWriterFactory implements IBTreeTupleWriterFactory {
+public class SimpleTupleWriterFactory implements ITreeIndexTupleWriterFactory {
private static final long serialVersionUID = 1L;
@Override
- public IBTreeTupleWriter createTupleWriter() {
+ public ITreeIndexTupleWriter createTupleWriter() {
return new SimpleTupleWriter();
}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/TypeAwareTupleReference.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleReference.java
similarity index 82%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/TypeAwareTupleReference.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleReference.java
index b82adb8..4f571b3 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/TypeAwareTupleReference.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleReference.java
@@ -13,15 +13,15 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.tuples;
+package edu.uci.ics.hyracks.storage.am.common.tuples;
import java.nio.ByteBuffer;
import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-public class TypeAwareTupleReference implements IBTreeTupleReference {
+public class TypeAwareTupleReference implements ITreeIndexTupleReference {
protected ByteBuffer buf;
protected int fieldStartIndex;
protected int fieldCount;
@@ -29,9 +29,9 @@
protected int nullFlagsBytes;
protected int dataStartOff;
- private ITypeTrait[] typeTraits;
- private VarLenIntEncoderDecoder encDec = new VarLenIntEncoderDecoder();
- private int[] decodedFieldSlots;
+ protected ITypeTrait[] typeTraits;
+ protected VarLenIntEncoderDecoder encDec = new VarLenIntEncoderDecoder();
+ protected int[] decodedFieldSlots;
public TypeAwareTupleReference(ITypeTrait[] typeTraits) {
this.typeTraits = typeTraits;
@@ -39,7 +39,7 @@
}
@Override
- public void resetByOffset(ByteBuffer buf, int tupleStartOff) {
+ public void resetByTupleOffset(ByteBuffer buf, int tupleStartOff) {
this.buf = buf;
this.tupleStartOff = tupleStartOff;
@@ -62,8 +62,8 @@
}
@Override
- public void resetByTupleIndex(IBTreeFrame frame, int tupleIndex) {
- resetByOffset(frame.getBuffer(), frame.getTupleOffset(tupleIndex));
+ public void resetByTupleIndex(ITreeIndexFrame frame, int tupleIndex) {
+ resetByTupleOffset(frame.getBuffer(), frame.getTupleOffset(tupleIndex));
}
@Override
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/TypeAwareTupleWriter.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriter.java
similarity index 79%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/TypeAwareTupleWriter.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriter.java
index c975dae..81b48e5 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/TypeAwareTupleWriter.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriter.java
@@ -13,19 +13,19 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.tuples;
+package edu.uci.ics.hyracks.storage.am.common.tuples;
import java.nio.ByteBuffer;
import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-public class TypeAwareTupleWriter implements IBTreeTupleWriter {
+public class TypeAwareTupleWriter implements ITreeIndexTupleWriter {
- private ITypeTrait[] typeTraits;
- private VarLenIntEncoderDecoder encDec = new VarLenIntEncoderDecoder();
+ protected ITypeTrait[] typeTraits;
+ protected VarLenIntEncoderDecoder encDec = new VarLenIntEncoderDecoder();
public TypeAwareTupleWriter(ITypeTrait[] typeTraits) {
this.typeTraits = typeTraits;
@@ -42,7 +42,7 @@
@Override
public int bytesRequired(ITupleReference tuple, int startField, int numFields) {
- int bytes = getNullFlagsBytes(tuple, startField, numFields) + getFieldSlotsBytes(tuple, startField, numFields);
+ int bytes = getNullFlagsBytes(numFields) + getFieldSlotsBytes(tuple, startField, numFields);
for (int i = startField; i < startField + numFields; i++) {
bytes += tuple.getFieldLength(i);
}
@@ -50,7 +50,7 @@
}
@Override
- public IBTreeTupleReference createTupleReference() {
+ public ITreeIndexTupleReference createTupleReference() {
return new TypeAwareTupleReference(typeTraits);
}
@@ -74,8 +74,8 @@
// write data fields
for (int i = 0; i < tuple.getFieldCount(); i++) {
- System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner, tuple
- .getFieldLength(i));
+ System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner,
+ tuple.getFieldLength(i));
runner += tuple.getFieldLength(i);
}
@@ -86,7 +86,7 @@
public int writeTupleFields(ITupleReference tuple, int startField, int numFields, ByteBuffer targetBuf,
int targetOff) {
int runner = targetOff;
- int nullFlagsBytes = getNullFlagsBytes(tuple, startField, numFields);
+ int nullFlagsBytes = getNullFlagsBytes(numFields);
// write null indicator bits
for (int i = 0; i < nullFlagsBytes; i++) {
targetBuf.put(runner++, (byte) 0);
@@ -102,19 +102,19 @@
runner = encDec.getPos();
for (int i = startField; i < startField + numFields; i++) {
- System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner, tuple
- .getFieldLength(i));
+ System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner,
+ tuple.getFieldLength(i));
runner += tuple.getFieldLength(i);
}
return runner - targetOff;
}
- private int getNullFlagsBytes(ITupleReference tuple) {
+ protected int getNullFlagsBytes(ITupleReference tuple) {
return (int) Math.ceil((double) tuple.getFieldCount() / 8.0);
}
- private int getFieldSlotsBytes(ITupleReference tuple) {
+ protected int getFieldSlotsBytes(ITupleReference tuple) {
int fieldSlotBytes = 0;
for (int i = 0; i < tuple.getFieldCount(); i++) {
if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
@@ -124,11 +124,11 @@
return fieldSlotBytes;
}
- private int getNullFlagsBytes(ITupleReference tuple, int startField, int numFields) {
+ protected int getNullFlagsBytes(int numFields) {
return (int) Math.ceil((double) numFields / 8.0);
}
- private int getFieldSlotsBytes(ITupleReference tuple, int startField, int numFields) {
+ protected int getFieldSlotsBytes(ITupleReference tuple, int startField, int numFields) {
int fieldSlotBytes = 0;
for (int i = startField; i < startField + numFields; i++) {
if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
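
TypeAwareTupleWriter above only spends slot bytes on variable-length fields; fixed-length fields need no slot because their length is known from the type traits. A deliberately simplified, standalone version of that size computation, with boolean flags standing in for ITypeTrait and a fixed slot width standing in for the variable-byte encoding the real writer uses:

public class TypeAwareSlotBytesDemo {
    // Only variable-length fields get a slot. bytesPerSlot is a simplification:
    // the real writer sizes each slot with VarLenIntEncoderDecoder.
    static int fieldSlotsBytes(boolean[] isVariableLength, int bytesPerSlot) {
        int slotBytes = 0;
        for (int i = 0; i < isVariableLength.length; i++) {
            if (isVariableLength[i]) {
                slotBytes += bytesPerSlot;
            }
        }
        return slotBytes;
    }

    public static void main(String[] args) {
        // e.g. schema (INT32, VARCHAR, INT32, VARCHAR): only two slots needed
        boolean[] varLen = { false, true, false, true };
        System.out.println(fieldSlotsBytes(varLen, 2)); // 4
    }
}
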
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/TypeAwareTupleWriterFactory.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriterFactory.java
similarity index 73%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/TypeAwareTupleWriterFactory.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriterFactory.java
index bfaa120..82072ae 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/TypeAwareTupleWriterFactory.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriterFactory.java
@@ -13,13 +13,13 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.tuples;
+package edu.uci.ics.hyracks.storage.am.common.tuples;
import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleWriter;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
-public class TypeAwareTupleWriterFactory implements IBTreeTupleWriterFactory {
+public class TypeAwareTupleWriterFactory implements ITreeIndexTupleWriterFactory {
private static final long serialVersionUID = 1L;
private ITypeTrait[] typeTraits;
@@ -29,7 +29,7 @@
}
@Override
- public IBTreeTupleWriter createTupleWriter() {
+ public ITreeIndexTupleWriter createTupleWriter() {
return new TypeAwareTupleWriter(typeTraits);
}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/VarLenIntEncoderDecoder.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/VarLenIntEncoderDecoder.java
similarity index 97%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/VarLenIntEncoderDecoder.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/VarLenIntEncoderDecoder.java
index 8e40855..d266d41 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/VarLenIntEncoderDecoder.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/VarLenIntEncoderDecoder.java
@@ -13,7 +13,7 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.tuples;
+package edu.uci.ics.hyracks.storage.am.common.tuples;
// encodes positive integers in a variable-byte format
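
VarLenIntEncoderDecoder, moved above, encodes positive integers in a variable-byte format. The self-contained codec below illustrates the general idea (7 payload bits per byte plus a continuation bit); the project's exact byte layout may differ.

import java.nio.ByteBuffer;

// Generic variable-byte (varint) codec for non-negative ints:
// 7 payload bits per byte, high bit set on every byte except the last.
public final class VarIntDemo {
    static int encode(int value, ByteBuffer buf) {
        int written = 0;
        while ((value & ~0x7F) != 0) {
            buf.put((byte) ((value & 0x7F) | 0x80)); // more bytes follow
            value >>>= 7;
            written++;
        }
        buf.put((byte) value); // final byte, high bit clear
        return written + 1;
    }

    static int decode(ByteBuffer buf) {
        int value = 0;
        int shift = 0;
        byte b;
        do {
            b = buf.get();
            value |= (b & 0x7F) << shift;
            shift += 7;
        } while ((b & 0x80) != 0);
        return value;
    }

    public static void main(String[] args) {
        ByteBuffer buf = ByteBuffer.allocate(8);
        int n = encode(300, buf); // 300 needs two bytes: 0xAC 0x02
        buf.flip();
        System.out.println("bytes=" + n + " decoded=" + decode(buf)); // bytes=2 decoded=300
    }
}
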
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexBufferCacheWarmup.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexBufferCacheWarmup.java
new file mode 100644
index 0000000..eb261da
--- /dev/null
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexBufferCacheWarmup.java
@@ -0,0 +1,84 @@
+package edu.uci.ics.hyracks.storage.am.common.utility;
+
+import java.util.ArrayList;
+import java.util.Random;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IntArrayList;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
+
+public class TreeIndexBufferCacheWarmup {
+ private final IBufferCache bufferCache;
+ private final IFreePageManager freePageManager;
+ private final int fileId;
+ private final ArrayList<IntArrayList> pagesByLevel = new ArrayList<IntArrayList>();
+ private final Random rnd = new Random();
+
+ public TreeIndexBufferCacheWarmup(IBufferCache bufferCache, IFreePageManager freePageManager, int fileId) {
+ this.bufferCache = bufferCache;
+ this.freePageManager = freePageManager;
+ this.fileId = fileId;
+ }
+
+ public void warmup(ITreeIndexFrame frame, ITreeIndexMetaDataFrame metaFrame, int[] warmupTreeLevels,
+ int[] warmupRepeats) throws HyracksDataException {
+ bufferCache.openFile(fileId);
+
+ // scan entire file to determine pages in each level
+ int maxPageId = freePageManager.getMaxPage(metaFrame);
+ for (int pageId = 0; pageId <= maxPageId; pageId++) {
+ ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+ page.acquireReadLatch();
+ try {
+ frame.setPage(page);
+ byte level = frame.getLevel();
+ while (level >= pagesByLevel.size()) {
+ pagesByLevel.add(new IntArrayList(100, 100));
+ }
+ if (level >= 0) {
+ // System.out.println("ADDING: " + level + " " + pageId);
+ pagesByLevel.get(level).add(pageId);
+ }
+ } finally {
+ page.releaseReadLatch();
+ bufferCache.unpin(page);
+ }
+ }
+
+ // pin certain pages again to simulate frequent access
+ for (int i = 0; i < warmupTreeLevels.length; i++) {
+ if (warmupTreeLevels[i] < pagesByLevel.size()) {
+ int repeats = warmupRepeats[i];
+ IntArrayList pageIds = pagesByLevel.get(warmupTreeLevels[i]);
+ int[] remainingPageIds = new int[pageIds.size()];
+ for (int r = 0; r < repeats; r++) {
+ for (int j = 0; j < pageIds.size(); j++) {
+ remainingPageIds[j] = pageIds.get(j);
+ }
+
+ int remainingLength = pageIds.size();
+ for (int j = 0; j < pageIds.size(); j++) {
+ int index = rnd.nextInt(remainingLength);
+ int pageId = remainingPageIds[index];
+
+ // pin & latch then immediately unlatch & unpin
+ ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+ page.acquireReadLatch();
+ page.releaseReadLatch();
+ bufferCache.unpin(page);
+
+ remainingPageIds[index] = remainingPageIds[remainingLength - 1];
+ remainingLength--;
+ }
+ }
+ }
+ }
+
+ bufferCache.closeFile(fileId);
+ }
+}
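
The warmup pass above re-pins the pages of one level in random order, visiting each page exactly once per repeat by moving the last live entry of remainingPageIds into the hole left by the chosen one. The same sampling-without-replacement pattern in isolation:

import java.util.Random;

public class RandomOrderVisitDemo {
    // Visits every id exactly once per call, in random order, without any
    // extra allocation beyond the input array (which it reorders).
    static void visitInRandomOrder(int[] ids, Random rnd) {
        int remaining = ids.length;
        while (remaining > 0) {
            int index = rnd.nextInt(remaining); // uniform in [0, remaining)
            int chosen = ids[index];
            System.out.println("visit page " + chosen); // stand-in for pin/unpin
            ids[index] = ids[remaining - 1]; // move the last live id into the hole
            remaining--;
        }
    }

    public static void main(String[] args) {
        visitInRandomOrder(new int[] { 10, 11, 12, 13 }, new Random());
    }
}
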
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexStats.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexStats.java
new file mode 100644
index 0000000..5b01b2d
--- /dev/null
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexStats.java
@@ -0,0 +1,134 @@
+package edu.uci.ics.hyracks.storage.am.common.utility;
+
+import java.text.DecimalFormat;
+
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+
+public class TreeIndexStats {
+
+ private TreeIndexNodeTypeStats rootStats = new TreeIndexNodeTypeStats();
+ private TreeIndexNodeTypeStats interiorStats = new TreeIndexNodeTypeStats();
+ private TreeIndexNodeTypeStats leafStats = new TreeIndexNodeTypeStats();
+
+ private int freePages = 0;
+ private int metaPages = 0;
+ private int treeLevels = 0;
+
+ public void begin() {
+ rootStats.clear();
+ interiorStats.clear();
+ leafStats.clear();
+ freePages = 0;
+ metaPages = 0;
+ treeLevels = 0;
+ }
+
+ public void addRoot(ITreeIndexFrame frame) {
+ treeLevels = frame.getLevel() + 1;
+ rootStats.add(frame);
+ }
+
+ public void add(ITreeIndexFrame frame) {
+ if (frame.isLeaf()) {
+ leafStats.add(frame);
+ } else if (frame.isInterior()) {
+ interiorStats.add(frame);
+ }
+ }
+
+ public void add(ITreeIndexMetaDataFrame metaFrame, IFreePageManager freePageManager) {
+ if (freePageManager.isFreePage(metaFrame)) {
+ freePages++;
+ } else if (freePageManager.isMetaPage(metaFrame)) {
+ metaPages++;
+ }
+ }
+
+ public void end() {
+ // nothing here currently
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder strBuilder = new StringBuilder();
+ DecimalFormat df = new DecimalFormat("#####.##");
+
+ strBuilder.append("TREE LEVELS: " + treeLevels + "\n");
+ strBuilder.append("FREE PAGES : " + freePages + "\n");
+ strBuilder.append("META PAGES : " + metaPages + "\n");
+ long totalPages = interiorStats.getNumPages() + leafStats.getNumPages() + freePages + metaPages;
+ strBuilder.append("TOTAL PAGES : " + totalPages + "\n");
+
+ strBuilder.append("\n");
+ strBuilder.append("ROOT STATS" + "\n");
+ strBuilder.append("NUM TUPLES: " + rootStats.getNumTuples() + "\n");
+ strBuilder.append("FILL FACTOR : " + df.format(rootStats.getAvgFillFactor()) + "\n");
+
+ if (interiorStats.getNumPages() > 0) {
+ strBuilder.append("\n");
+ strBuilder.append("INTERIOR STATS" + "\n");
+ strBuilder.append("NUM PAGES: " + interiorStats.getNumPages() + "\n");
+ strBuilder.append("NUM TUPLES: " + interiorStats.getNumTuples() + "\n");
+ strBuilder.append("AVG TUPLES/PAGE: " + df.format(interiorStats.getAvgNumTuples()) + "\n");
+ strBuilder.append("AVG FILL FACTOR: " + df.format(interiorStats.getAvgFillFactor()) + "\n");
+ }
+
+ if (leafStats.getNumPages() > 0) {
+ strBuilder.append("\n");
+ strBuilder.append("LEAF STATS" + "\n");
+ strBuilder.append("NUM PAGES: " + leafStats.getNumPages() + "\n");
+ strBuilder.append("NUM TUPLES: " + leafStats.getNumTuples() + "\n");
+ strBuilder.append("AVG TUPLES/PAGE: " + df.format(leafStats.getAvgNumTuples()) + "\n");
+ strBuilder.append("AVG FILL FACTOR: " + df.format(leafStats.getAvgFillFactor()) + "\n");
+ }
+
+ return strBuilder.toString();
+ }
+
+ public class TreeIndexNodeTypeStats {
+ private long numTuples;
+ private long sumTuplesSizes;
+ private long numPages;
+ private double sumFillFactors;
+
+ public void clear() {
+ numTuples = 0;
+ sumTuplesSizes = 0;
+ numPages = 0;
+ sumFillFactors = 0;
+ }
+
+ public void add(ITreeIndexFrame frame) {
+ numPages++;
+ numTuples += frame.getTupleCount();
+ sumFillFactors += (double) (frame.getBuffer().capacity() - frame.getTotalFreeSpace())
+ / (double) frame.getBuffer().capacity();
+ }
+
+ public long getNumTuples() {
+ return numTuples;
+ }
+
+ public long getSumTupleSizes() {
+ return sumTuplesSizes;
+ }
+
+ public long getNumPages() {
+ return numPages;
+ }
+
+ public double getAvgNumTuples() {
+ return (double) numTuples / (double) numPages;
+ }
+
+ public double getAvgTupleSize() {
+ return (double) sumTuplesSizes / (double) numTuples;
+ }
+
+ public double getAvgFillFactor() {
+ return sumFillFactors / numPages;
+ }
+ }
+
+}
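
The per-page fill factor accumulated above is (capacity - totalFreeSpace) / capacity, and getAvgFillFactor reports the arithmetic mean over the pages of a node type. A small worked example with made-up numbers:

// Average fill factor for two 32768-byte pages with 8192 and 16384 bytes free:
double f1 = (32768.0 - 8192.0) / 32768.0;  // 0.75
double f2 = (32768.0 - 16384.0) / 32768.0; // 0.50
double avgFillFactor = (f1 + f2) / 2.0;    // 0.625, i.e. sumFillFactors / numPages
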
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexStatsGatherer.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexStatsGatherer.java
new file mode 100644
index 0000000..fc0ab5e
--- /dev/null
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexStatsGatherer.java
@@ -0,0 +1,70 @@
+package edu.uci.ics.hyracks.storage.am.common.utility;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
+
+public class TreeIndexStatsGatherer {
+
+ private final TreeIndexStats treeIndexStats = new TreeIndexStats();
+ private final IBufferCache bufferCache;
+ private final IFreePageManager freePageManager;
+ private final int fileId;
+ private final int rootPage;
+
+ public TreeIndexStatsGatherer(IBufferCache bufferCache, IFreePageManager freePageManager, int fileId, int rootPage) {
+ this.bufferCache = bufferCache;
+ this.freePageManager = freePageManager;
+ this.fileId = fileId;
+ this.rootPage = rootPage;
+ }
+
+ public TreeIndexStats gatherStats(ITreeIndexFrame leafFrame, ITreeIndexFrame interiorFrame,
+ ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
+
+ bufferCache.openFile(fileId);
+
+ treeIndexStats.begin();
+
+ int maxPageId = freePageManager.getMaxPage(metaFrame);
+ for (int pageId = 0; pageId <= maxPageId; pageId++) {
+ ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+ page.acquireReadLatch();
+ try {
+ metaFrame.setPage(page);
+ leafFrame.setPage(page);
+ interiorFrame.setPage(page);
+
+ if (leafFrame.isLeaf()) {
+ if (pageId == rootPage) {
+ treeIndexStats.addRoot(leafFrame);
+ } else {
+ treeIndexStats.add(leafFrame);
+ }
+ } else if (interiorFrame.isInterior()) {
+ if (pageId == rootPage) {
+ treeIndexStats.addRoot(interiorFrame);
+ } else {
+ treeIndexStats.add(interiorFrame);
+ }
+ } else {
+ treeIndexStats.add(metaFrame, freePageManager);
+ }
+
+ } finally {
+ page.releaseReadLatch();
+ bufferCache.unpin(page);
+ }
+ }
+
+ treeIndexStats.end();
+
+ bufferCache.closeFile(fileId);
+
+ return treeIndexStats;
+ }
+}
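
A hedged usage sketch of the stats gatherer above: the caller supplies frames created from the tree's own frame factories (obtaining those factories is tree-specific and omitted here), and only methods shown in this diff are used.

import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
import edu.uci.ics.hyracks.storage.am.common.utility.TreeIndexStats;
import edu.uci.ics.hyracks.storage.am.common.utility.TreeIndexStatsGatherer;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;

// Gathers and prints stats for an already-built tree index.
public class PrintTreeStats {
    public static void print(IBufferCache bufferCache, IFreePageManager freePageManager, int fileId, int rootPage,
            ITreeIndexFrame leafFrame, ITreeIndexFrame interiorFrame, ITreeIndexMetaDataFrame metaFrame)
            throws HyracksDataException {
        TreeIndexStatsGatherer gatherer = new TreeIndexStatsGatherer(bufferCache, freePageManager, fileId, rootPage);
        TreeIndexStats stats = gatherer.gatherStats(leafFrame, interiorFrame, metaFrame);
        System.out.println(stats.toString());
    }
}
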
diff --git a/hyracks-storage-am-invertedindex/pom.xml b/hyracks-storage-am-invertedindex/pom.xml
index 78a053c..ca40e57 100644
--- a/hyracks-storage-am-invertedindex/pom.xml
+++ b/hyracks-storage-am-invertedindex/pom.xml
@@ -58,7 +58,7 @@
<version>0.1.7-SNAPSHOT</version>
<type>jar</type>
<scope>compile</scope>
- </dependency>
+ </dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IBinaryTokenizer.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IBinaryTokenizer.java
deleted file mode 100644
index 0d5dc8f..0000000
--- a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IBinaryTokenizer.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.invertedindex.api;
-
-import java.io.DataOutput;
-import java.io.IOException;
-
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-
-public interface IBinaryTokenizer {
-
- public void reset(byte[] data, int start, int length);
-
- public boolean hasNext();
-
- public void next();
-
- public int getTokenStartOff();
-
- public int getTokenLength();
-
- public int getNumTokens();
-
- public void writeToken(DataOutput dos) throws IOException;
-
- public RecordDescriptor getTokenSchema();
-}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexOperatorDescriptorHelper.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexOperatorDescriptorHelper.java
new file mode 100644
index 0000000..b3afe4a
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexOperatorDescriptorHelper.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.api;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOperatorDescriptorHelper;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndex;
+
+public interface IInvertedIndexOperatorDescriptorHelper extends ITreeIndexOperatorDescriptorHelper {
+ public IFileSplitProvider getInvIndexFileSplitProvider();
+
+ public IBinaryComparatorFactory[] getInvIndexComparatorFactories();
+
+ public ITypeTrait[] getInvIndexTypeTraits();
+
+ public IIndexRegistryProvider<InvertedIndex> getInvIndexRegistryProvider();
+}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexResultCursor.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexResultCursor.java
index 990936e..fb73a65 100644
--- a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexResultCursor.java
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexResultCursor.java
@@ -15,14 +15,14 @@
package edu.uci.ics.hyracks.storage.am.invertedindex.api;
-import java.nio.ByteBuffer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
public interface IInvertedIndexResultCursor {
public boolean hasNext();
public void next();
- public ByteBuffer getBuffer();
+ public ITupleReference getTuple();
- public void reset();
+ public void reset(IInvertedIndexSearcher invIndexSearcher);
}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IBinaryTokenizerFactory.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexSearchModifier.java
similarity index 76%
rename from hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IBinaryTokenizerFactory.java
rename to hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexSearchModifier.java
index 7e91fd4..8f7a3e3 100644
--- a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IBinaryTokenizerFactory.java
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexSearchModifier.java
@@ -15,8 +15,10 @@
package edu.uci.ics.hyracks.storage.am.invertedindex.api;
-import java.io.Serializable;
+import java.util.List;
-public interface IBinaryTokenizerFactory extends Serializable {
- public IBinaryTokenizer createBinaryTokenizer();
+public interface IInvertedIndexSearchModifier {
+ int getOccurrenceThreshold(List<IInvertedListCursor> invListCursors);
+
+ int getPrefixLists(List<IInvertedListCursor> invListCursors);
}
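
IInvertedIndexSearchModifier above abstracts how many inverted lists a candidate must occur in (the occurrence threshold) and how many of the shortest lists are scanned exhaustively as prefix lists. As a sketch only, and not necessarily the modifier shipped with Hyracks: a conjunctive (AND) query requires every token to match, so the threshold equals the number of lists and a single prefix list suffices to generate all candidates.

import java.util.List;

import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifier;
import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListCursor;

// Sketch of a conjunctive (AND) search modifier.
public class ConjunctiveSearchModifierSketch implements IInvertedIndexSearchModifier {
    @Override
    public int getOccurrenceThreshold(List<IInvertedListCursor> invListCursors) {
        return invListCursors.size(); // every query token must match
    }

    @Override
    public int getPrefixLists(List<IInvertedListCursor> invListCursors) {
        // assuming lists sorted by length, merging the shortest one already
        // yields every candidate that can possibly reach the threshold
        return 1;
    }
}
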
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexSearcher.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexSearcher.java
index 98dc3b9..36d64ea 100644
--- a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexSearcher.java
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedIndexSearcher.java
@@ -15,10 +15,21 @@
package edu.uci.ics.hyracks.storage.am.invertedindex.api;
+import java.nio.ByteBuffer;
+import java.util.List;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
public interface IInvertedIndexSearcher {
- public void search(ITupleReference queryTuple, int queryFieldIndex) throws Exception;
+ public void search(IInvertedIndexResultCursor resultCursor, ITupleReference queryTuple, int queryFieldIndex,
+ IInvertedIndexSearchModifier searchModifier) throws Exception;
- public IInvertedIndexResultCursor getResultCursor();
+ public IFrameTupleAccessor createResultFrameTupleAccessor();
+
+ public ITupleReference createResultTupleReference();
+
+ public List<ByteBuffer> getResultBuffers();
+
+ public int getNumValidResultBuffers();
}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedListBuilder.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedListBuilder.java
new file mode 100644
index 0000000..aaaef56
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedListBuilder.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.api;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+
+public interface IInvertedListBuilder {
+ public boolean startNewList(ITupleReference tuple, int numTokenFields);
+
+ // returns true if successfully appended
+ // returns false if not enough space in targetBuf
+ public boolean appendElement(ITupleReference tuple, int numTokenFields, int numElementFields);
+
+ public void setTargetBuffer(byte[] targetBuf, int startPos);
+
+ public int getListSize();
+
+ public int getPos();
+}
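
A hedged sketch of the append protocol suggested by IInvertedListBuilder above: the builder writes into a caller-supplied byte array and signals "out of space" by returning false, at which point the caller would switch to a fresh page and retry. The helper below is an assumption about intended usage, not code from the repository.

import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListBuilder;

// Starts a new inverted list on the given page and appends one element.
// Returns false if the page does not have enough room (caller retries elsewhere).
public class InvertedListAppendSketch {
    public static boolean startListWithElement(IInvertedListBuilder builder, byte[] page, int startPos,
            ITupleReference tuple, int numTokenFields, int numElementFields) {
        builder.setTargetBuffer(page, startPos);
        if (!builder.startNewList(tuple, numTokenFields)) {
            return false; // not enough room even for the list header
        }
        return builder.appendElement(tuple, numTokenFields, numElementFields);
    }
}
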
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedListCursor.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedListCursor.java
new file mode 100644
index 0000000..9435f3c
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IInvertedListCursor.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.api;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+
+public interface IInvertedListCursor extends Comparable<IInvertedListCursor> {
+ void reset(int startPageId, int endPageId, int startOff, int numElements);
+
+ void pinPagesSync() throws HyracksDataException;
+
+ void pinPagesAsync() throws HyracksDataException;
+
+ void unpinPages() throws HyracksDataException;
+
+ boolean hasNext();
+
+ void next();
+
+ ITupleReference getTuple();
+
+ // getters
+ int getNumElements();
+
+ int getStartPageId();
+
+ int getEndPageId();
+
+ int getStartOff();
+
+ // jump to a specific element
+ void positionCursor(int elementIx);
+
+ boolean containsKey(ITupleReference searchTuple, MultiComparator invListCmp);
+
+ // for debugging
+ String printInvList(ISerializerDeserializer[] serdes) throws HyracksDataException;
+
+ String printCurrentElement(ISerializerDeserializer[] serdes) throws HyracksDataException;
+}
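
A hedged usage sketch of the IInvertedListCursor contract above: reset the cursor to a list's extent (known, for example, from the B-tree entry of a token), pin its pages, iterate, and always unpin. Only methods declared in the interface are used; the hasNext/next ordering is assumed to follow the usual cursor convention.

import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListCursor;

// Iterates one inverted list whose extent is already known.
public class InvertedListScan {
    public static int countElements(IInvertedListCursor cursor, int startPageId, int endPageId,
            int startOff, int numElements) throws HyracksDataException {
        cursor.reset(startPageId, endPageId, startOff, numElements);
        cursor.pinPagesSync();
        int count = 0;
        try {
            while (cursor.hasNext()) {
                cursor.next();
                ITupleReference element = cursor.getTuple();
                // ... consume element ...
                count++;
            }
        } finally {
            cursor.unpinPages(); // always release the pinned pages
        }
        return count;
    }
}
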
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/AbstractInvertedIndexOperatorDescriptor.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/AbstractInvertedIndexOperatorDescriptor.java
new file mode 100644
index 0000000..0d0c5bf
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/AbstractInvertedIndexOperatorDescriptor.java
@@ -0,0 +1,151 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.dataflow;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOpHelperFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexOperatorDescriptorHelper;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndex;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
+public abstract class AbstractInvertedIndexOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor
+ implements IInvertedIndexOperatorDescriptorHelper {
+
+ private static final long serialVersionUID = 1L;
+
+ // general
+ protected final IStorageManagerInterface storageManager;
+
+ // btree
+ protected final IFileSplitProvider btreeFileSplitProvider;
+ protected final IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider;
+ protected final ITreeIndexFrameFactory interiorFrameFactory;
+ protected final ITreeIndexFrameFactory leafFrameFactory;
+ protected final ITypeTrait[] btreeTypeTraits;
+ protected final IBinaryComparatorFactory[] btreeComparatorFactories;
+ protected final ITreeIndexOpHelperFactory opHelperFactory;
+
+ // inverted index
+ protected final IFileSplitProvider invIndexFileSplitProvider;
+ protected final IIndexRegistryProvider<InvertedIndex> invIndexRegistryProvider;
+ protected final ITypeTrait[] invIndexTypeTraits;
+ protected final IBinaryComparatorFactory[] invIndexComparatorFactories;
+
+ public AbstractInvertedIndexOperatorDescriptor(JobSpecification spec, int inputArity, int outputArity,
+ RecordDescriptor recDesc, IStorageManagerInterface storageManager,
+ IFileSplitProvider btreeFileSplitProvider, IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider,
+ ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory leafFrameFactory,
+ ITypeTrait[] btreeTypeTraits, IBinaryComparatorFactory[] btreeComparatorFactories, float btreeFillFactor,
+ ITreeIndexOpHelperFactory opHelperFactory, IFileSplitProvider invIndexFileSplitProvider,
+ IIndexRegistryProvider<InvertedIndex> invIndexRegistryProvider, ITypeTrait[] invIndexTypeTraits,
+ IBinaryComparatorFactory[] invIndexComparatorFactories) {
+ super(spec, inputArity, outputArity);
+
+ // general
+ this.storageManager = storageManager;
+
+ // btree
+ this.btreeFileSplitProvider = btreeFileSplitProvider;
+ this.treeIndexRegistryProvider = treeIndexRegistryProvider;
+ this.interiorFrameFactory = interiorFrameFactory;
+ this.leafFrameFactory = leafFrameFactory;
+ this.btreeTypeTraits = btreeTypeTraits;
+ this.btreeComparatorFactories = btreeComparatorFactories;
+ this.opHelperFactory = opHelperFactory;
+
+ // inverted index
+ this.invIndexFileSplitProvider = invIndexFileSplitProvider;
+ this.invIndexRegistryProvider = invIndexRegistryProvider;
+ this.invIndexTypeTraits = invIndexTypeTraits;
+ this.invIndexComparatorFactories = invIndexComparatorFactories;
+
+ if (outputArity > 0)
+ recordDescriptors[0] = recDesc;
+ }
+
+ @Override
+ public IFileSplitProvider getTreeIndexFileSplitProvider() {
+ return btreeFileSplitProvider;
+ }
+
+ @Override
+ public IBinaryComparatorFactory[] getTreeIndexComparatorFactories() {
+ return btreeComparatorFactories;
+ }
+
+ @Override
+ public ITypeTrait[] getTreeIndexTypeTraits() {
+ return btreeTypeTraits;
+ }
+
+ @Override
+ public ITreeIndexFrameFactory getTreeIndexInteriorFactory() {
+ return interiorFrameFactory;
+ }
+
+ @Override
+ public ITreeIndexFrameFactory getTreeIndexLeafFactory() {
+ return leafFrameFactory;
+ }
+
+ @Override
+ public IStorageManagerInterface getStorageManager() {
+ return storageManager;
+ }
+
+ @Override
+ public IIndexRegistryProvider<ITreeIndex> getTreeIndexRegistryProvider() {
+ return treeIndexRegistryProvider;
+ }
+
+ @Override
+ public RecordDescriptor getRecordDescriptor() {
+ return recordDescriptors[0];
+ }
+
+ @Override
+ public IIndexRegistryProvider<InvertedIndex> getInvIndexRegistryProvider() {
+ return invIndexRegistryProvider;
+ }
+
+ @Override
+ public IBinaryComparatorFactory[] getInvIndexComparatorFactories() {
+ return invIndexComparatorFactories;
+ }
+
+ @Override
+ public IFileSplitProvider getInvIndexFileSplitProvider() {
+ return invIndexFileSplitProvider;
+ }
+
+ @Override
+ public ITypeTrait[] getInvIndexTypeTraits() {
+ return invIndexTypeTraits;
+ }
+
+ @Override
+ public ITreeIndexOpHelperFactory getTreeIndexOpHelperFactory() {
+ return opHelperFactory;
+ }
+}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/BinaryTokenizerOperatorDescriptor.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/BinaryTokenizerOperatorDescriptor.java
index 059a1e2..83246d6 100644
--- a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/BinaryTokenizerOperatorDescriptor.java
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/BinaryTokenizerOperatorDescriptor.java
@@ -23,7 +23,7 @@
import edu.uci.ics.hyracks.api.job.IOperatorEnvironment;
import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
-import edu.uci.ics.hyracks.storage.am.invertedindex.api.IBinaryTokenizerFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizerFactory;
public class BinaryTokenizerOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
@@ -50,6 +50,6 @@
public IOperatorNodePushable createPushRuntime(IHyracksStageletContext ctx, IOperatorEnvironment env,
IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
return new BinaryTokenizerOperatorNodePushable(ctx, recordDescProvider.getInputRecordDescriptor(odId, 0),
- recordDescriptors[0], tokenizerFactory.createBinaryTokenizer(), tokenFields, projFields);
+ recordDescriptors[0], tokenizerFactory.createTokenizer(), tokenFields, projFields);
}
}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/BinaryTokenizerOperatorNodePushable.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/BinaryTokenizerOperatorNodePushable.java
index 9dac535..0647f45 100644
--- a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/BinaryTokenizerOperatorNodePushable.java
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/BinaryTokenizerOperatorNodePushable.java
@@ -27,7 +27,8 @@
import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
-import edu.uci.ics.hyracks.storage.am.invertedindex.api.IBinaryTokenizer;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizer;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IToken;
public class BinaryTokenizerOperatorNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
@@ -84,7 +85,8 @@
builder.reset();
try {
- tokenizer.writeToken(builderDos);
+ IToken token = tokenizer.getToken();
+ token.serializeToken(builderDos);
builder.addFieldEndOffset();
} catch (IOException e) {
throw new HyracksDataException(e.getMessage());
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexBulkLoadOperatorDescriptor.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexBulkLoadOperatorDescriptor.java
new file mode 100644
index 0000000..d003580
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexBulkLoadOperatorDescriptor.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.api.job.IOperatorEnvironment;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOpHelperFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListBuilder;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndex;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
+public class InvertedIndexBulkLoadOperatorDescriptor extends AbstractInvertedIndexOperatorDescriptor {
+
+ private static final long serialVersionUID = 1L;
+
+ private final int[] fieldPermutation;
+ private final float btreeFillFactor;
+ private final IInvertedListBuilder invListBuilder;
+
+ public InvertedIndexBulkLoadOperatorDescriptor(JobSpecification spec, IStorageManagerInterface storageManager,
+ int[] fieldPermutation, IFileSplitProvider btreeFileSplitProvider,
+ IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider, ITreeIndexFrameFactory interiorFrameFactory,
+ ITreeIndexFrameFactory leafFrameFactory, ITypeTrait[] btreeTypeTraits,
+ IBinaryComparatorFactory[] btreeComparatorFactories, float btreeFillFactor,
+ ITreeIndexOpHelperFactory opHelperFactory, IFileSplitProvider invIndexFileSplitProvider,
+ IIndexRegistryProvider<InvertedIndex> invIndexRegistryProvider, ITypeTrait[] invIndexTypeTraits,
+ IBinaryComparatorFactory[] invIndexComparatorFactories, IInvertedListBuilder invListBuilder) {
+ super(spec, 1, 0, null, storageManager, btreeFileSplitProvider, treeIndexRegistryProvider,
+ interiorFrameFactory, leafFrameFactory, btreeTypeTraits, btreeComparatorFactories, btreeFillFactor,
+ opHelperFactory, invIndexFileSplitProvider, invIndexRegistryProvider, invIndexTypeTraits,
+ invIndexComparatorFactories);
+ this.fieldPermutation = fieldPermutation;
+ this.btreeFillFactor = btreeFillFactor;
+ this.invListBuilder = invListBuilder;
+ }
+
+ @Override
+ public IOperatorNodePushable createPushRuntime(IHyracksStageletContext ctx, IOperatorEnvironment env,
+ IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
+ return new InvertedIndexBulkLoadOperatorNodePushable(this, ctx, partition, fieldPermutation, btreeFillFactor,
+ invListBuilder, recordDescProvider);
+ }
+}
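For orientation, a minimal sketch (not part of the patch) of how this new bulk-load operator might be wired into a Hyracks job. The helper class and method names are invented for illustration; all providers, factories, the field permutation, and the token-sorted upstream operator are assumed to be supplied by the caller, partition constraints are omitted, and the 0.7f fill factor is an arbitrary example value.

```java
import edu.uci.ics.hyracks.api.dataflow.IOperatorDescriptor;
import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
import edu.uci.ics.hyracks.api.job.JobSpecification;
import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOpHelperFactory;
import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListBuilder;
import edu.uci.ics.hyracks.storage.am.invertedindex.dataflow.InvertedIndexBulkLoadOperatorDescriptor;
import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndex;
import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;

public class InvertedIndexBulkLoadJobSketch {
    // Illustrative only: every provider/factory and the already-sorted source operator
    // are passed in by the caller; nothing here is prescribed by the patch itself.
    public static InvertedIndexBulkLoadOperatorDescriptor addBulkLoad(JobSpecification spec,
            IOperatorDescriptor sortedSource, IStorageManagerInterface storageManager, int[] fieldPermutation,
            IFileSplitProvider btreeSplits, IIndexRegistryProvider<ITreeIndex> treeIndexRegistry,
            ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory leafFrameFactory,
            ITypeTrait[] btreeTypeTraits, IBinaryComparatorFactory[] btreeCmpFactories,
            ITreeIndexOpHelperFactory opHelperFactory, IFileSplitProvider invIndexSplits,
            IIndexRegistryProvider<InvertedIndex> invIndexRegistry, ITypeTrait[] invListTypeTraits,
            IBinaryComparatorFactory[] invListCmpFactories, IInvertedListBuilder invListBuilder) {
        InvertedIndexBulkLoadOperatorDescriptor bulkLoad = new InvertedIndexBulkLoadOperatorDescriptor(spec,
                storageManager, fieldPermutation, btreeSplits, treeIndexRegistry, interiorFrameFactory,
                leafFrameFactory, btreeTypeTraits, btreeCmpFactories, 0.7f, opHelperFactory, invIndexSplits,
                invIndexRegistry, invListTypeTraits, invListCmpFactories, invListBuilder);
        // the operator is a sink (outputArity == 0), so it is connected as a consumer and added as a root
        spec.connect(new OneToOneConnectorDescriptor(spec), sortedSource, 0, bulkLoad, 0);
        spec.addRoot(bulkLoad);
        return bulkLoad;
    }
}
```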
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexBulkLoadOperatorNodePushable.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexBulkLoadOperatorNodePushable.java
new file mode 100644
index 0000000..4969124
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexBulkLoadOperatorNodePushable.java
@@ -0,0 +1,113 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.storage.am.invertedindex.dataflow;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputSinkOperatorNodePushable;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexHelperOpenMode;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.PermutingFrameTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexOpHelper;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListBuilder;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndex;
+
+public class InvertedIndexBulkLoadOperatorNodePushable extends AbstractUnaryInputSinkOperatorNodePushable {
+ private final TreeIndexOpHelper treeIndexOpHelper;
+ private float btreeFillFactor;
+
+ private final InvertedIndexOpHelper invIndexOpHelper;
+ protected final IInvertedListBuilder invListBuilder;
+ private InvertedIndex.BulkLoadContext bulkLoadCtx;
+
+ private final IHyracksStageletContext ctx;
+
+ private FrameTupleAccessor accessor;
+ private PermutingFrameTupleReference tuple = new PermutingFrameTupleReference();
+
+ private IRecordDescriptorProvider recordDescProvider;
+
+ public InvertedIndexBulkLoadOperatorNodePushable(AbstractInvertedIndexOperatorDescriptor opDesc,
+ IHyracksStageletContext ctx, int partition, int[] fieldPermutation, float btreeFillFactor,
+ IInvertedListBuilder invListBuilder, IRecordDescriptorProvider recordDescProvider) {
+ treeIndexOpHelper = opDesc.getTreeIndexOpHelperFactory().createTreeIndexOpHelper(opDesc, ctx, partition,
+ IndexHelperOpenMode.CREATE);
+ invIndexOpHelper = new InvertedIndexOpHelper(opDesc, ctx, partition, IndexHelperOpenMode.CREATE);
+ this.btreeFillFactor = btreeFillFactor;
+ this.recordDescProvider = recordDescProvider;
+ this.ctx = ctx;
+ this.invListBuilder = invListBuilder;
+ tuple.setFieldPermutation(fieldPermutation);
+ }
+
+ @Override
+ public void open() throws HyracksDataException {
+ AbstractInvertedIndexOperatorDescriptor opDesc = (AbstractInvertedIndexOperatorDescriptor) treeIndexOpHelper
+ .getOperatorDescriptor();
+ RecordDescriptor recDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getOperatorId(), 0);
+ accessor = new FrameTupleAccessor(treeIndexOpHelper.getHyracksStageletContext().getFrameSize(), recDesc);
+
+ // btree
+ try {
+ treeIndexOpHelper.init();
+ treeIndexOpHelper.getTreeIndex().open(treeIndexOpHelper.getIndexFileId());
+ } catch (Exception e) {
+ // cleanup in case of failure
+ treeIndexOpHelper.deinit();
+ throw new HyracksDataException(e);
+ }
+
+ // inverted index
+ try {
+ invIndexOpHelper.init();
+ invIndexOpHelper.getInvIndex().open(invIndexOpHelper.getInvIndexFileId());
+ bulkLoadCtx = invIndexOpHelper.getInvIndex().beginBulkLoad(invListBuilder, ctx.getFrameSize(),
+ btreeFillFactor);
+ } catch (Exception e) {
+ // cleanup in case of failure
+ invIndexOpHelper.deinit();
+ throw new HyracksDataException(e);
+ }
+ }
+
+ @Override
+ public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+ accessor.reset(buffer);
+ int tupleCount = accessor.getTupleCount();
+ for (int i = 0; i < tupleCount; i++) {
+ tuple.reset(accessor, i);
+ invIndexOpHelper.getInvIndex().bulkLoadAddTuple(bulkLoadCtx, tuple);
+ }
+ }
+
+ @Override
+ public void close() throws HyracksDataException {
+ try {
+ invIndexOpHelper.getInvIndex().endBulkLoad(bulkLoadCtx);
+ } finally {
+ // release both the inverted-index and btree files
+ invIndexOpHelper.deinit();
+ treeIndexOpHelper.deinit();
+ }
+ }
+
+ @Override
+ public void flush() throws HyracksDataException {
+ }
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexOpHelper.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexOpHelper.java
new file mode 100644
index 0000000..c16cfcd
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/dataflow/InvertedIndexOpHelper.java
@@ -0,0 +1,158 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.storage.am.invertedindex.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOperatorDescriptorHelper;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexHelperOpenMode;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexRegistry;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexOperatorDescriptorHelper;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndex;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+
+public final class InvertedIndexOpHelper {
+
+ private InvertedIndex invIndex;
+ private int invIndexFileId = -1;
+ private int partition;
+
+ private IInvertedIndexOperatorDescriptorHelper opDesc;
+ private IHyracksStageletContext ctx;
+
+ private IndexHelperOpenMode mode;
+
+ public InvertedIndexOpHelper(IInvertedIndexOperatorDescriptorHelper opDesc, final IHyracksStageletContext ctx,
+ int partition, IndexHelperOpenMode mode) {
+ this.opDesc = opDesc;
+ this.ctx = ctx;
+ this.mode = mode;
+ this.partition = partition;
+ }
+
+ public void init() throws HyracksDataException {
+ IBufferCache bufferCache = opDesc.getStorageManager().getBufferCache(ctx);
+ IFileMapProvider fileMapProvider = opDesc.getStorageManager().getFileMapProvider(ctx);
+ IFileSplitProvider fileSplitProvider = opDesc.getInvIndexFileSplitProvider();
+
+ FileReference f = fileSplitProvider.getFileSplits()[partition].getLocalFile();
+ boolean fileIsMapped = fileMapProvider.isMapped(f);
+
+ switch (mode) {
+
+ case OPEN: {
+ if (!fileIsMapped) {
+ throw new HyracksDataException("Trying to open inverted index from unmapped file " + f.toString());
+ }
+ }
+ break;
+
+ case CREATE:
+ case ENLIST: {
+ if (!fileIsMapped) {
+ bufferCache.createFile(f);
+ }
+ }
+ break;
+
+ }
+
+ int fileId = fileMapProvider.lookupFileId(f);
+ try {
+ bufferCache.openFile(fileId);
+ } catch (HyracksDataException e) {
+ // revert state of buffer cache since file failed to open
+ if (!fileIsMapped) {
+ bufferCache.deleteFile(fileId);
+ }
+ throw e;
+ }
+
+ // only set invIndexFileId member when openFile() succeeds,
+ // otherwise deinit() will try to close the file that failed to open
+ invIndexFileId = fileId;
+ IndexRegistry<InvertedIndex> invIndexRegistry = opDesc.getInvIndexRegistryProvider().getRegistry(ctx);
+ invIndex = invIndexRegistry.get(invIndexFileId);
+ if (invIndex == null) {
+
+ // create new inverted index and register it
+ invIndexRegistry.lock();
+ try {
+ // check if inverted index has already been registered by
+ // another thread
+ invIndex = invIndexRegistry.get(invIndexFileId);
+ if (invIndex == null) {
+ // this thread should create and register the inverted index
+
+ IBinaryComparator[] comparators = new IBinaryComparator[opDesc.getInvIndexComparatorFactories().length];
+ for (int i = 0; i < opDesc.getInvIndexComparatorFactories().length; i++) {
+ comparators[i] = opDesc.getInvIndexComparatorFactories()[i].createBinaryComparator();
+ }
+
+ MultiComparator cmp = new MultiComparator(opDesc.getInvIndexTypeTraits(), comparators);
+
+ // assumes btree has already been registered
+ IFileSplitProvider btreeFileSplitProvider = opDesc.getTreeIndexFileSplitProvider();
+ IndexRegistry<ITreeIndex> treeIndexRegistry = opDesc.getTreeIndexRegistryProvider()
+ .getRegistry(ctx);
+ FileReference btreeFile = btreeFileSplitProvider.getFileSplits()[partition].getLocalFile();
+ boolean btreeFileIsMapped = fileMapProvider.isMapped(btreeFile);
+ if (!btreeFileIsMapped) {
+ throw new HyracksDataException(
+ "Trying to create inverted index, but associated BTree file has not been mapped");
+ }
+ int btreeFileId = fileMapProvider.lookupFileId(btreeFile);
+ BTree btree = (BTree) treeIndexRegistry.get(btreeFileId);
+
+ invIndex = new InvertedIndex(bufferCache, btree, cmp);
+ invIndex.open(invIndexFileId);
+ invIndexRegistry.register(invIndexFileId, invIndex);
+ }
+ } finally {
+ invIndexRegistry.unlock();
+ }
+ }
+ }
+
+ public void deinit() throws HyracksDataException {
+ if (invIndexFileId != -1) {
+ IBufferCache bufferCache = opDesc.getStorageManager().getBufferCache(ctx);
+ bufferCache.closeFile(invIndexFileId);
+ }
+ }
+
+ public InvertedIndex getInvIndex() {
+ return invIndex;
+ }
+
+ public IHyracksStageletContext getHyracksStageletContext() {
+ return ctx;
+ }
+
+ public ITreeIndexOperatorDescriptorHelper getOperatorDescriptor() {
+ return opDesc;
+ }
+
+ public int getInvIndexFileId() {
+ return invIndexFileId;
+ }
+}
\ No newline at end of file
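A brief sketch of the helper's intended lifecycle, mirroring how the bulk-load pushable above drives it: construct with an open mode, init(), use the shared InvertedIndex instance, and always deinit() to release the file. The wrapper class, method, and callback interface are invented for illustration; the descriptor helper, stagelet context, and partition are assumed to come from the enclosing operator.

```java
import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexHelperOpenMode;
import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexOperatorDescriptorHelper;
import edu.uci.ics.hyracks.storage.am.invertedindex.dataflow.InvertedIndexOpHelper;
import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndex;

public class InvertedIndexOpHelperUsageSketch {
    // Opens an existing inverted index for the given partition, hands it to the caller-supplied
    // work, and always releases the file again. InvIndexWork is a stand-in for whatever the
    // real operator does with the index (e.g., searching).
    public static void withOpenInvIndex(IInvertedIndexOperatorDescriptorHelper opDesc,
            IHyracksStageletContext ctx, int partition, InvIndexWork work) throws HyracksDataException {
        InvertedIndexOpHelper opHelper = new InvertedIndexOpHelper(opDesc, ctx, partition, IndexHelperOpenMode.OPEN);
        opHelper.init(); // maps/opens the file and fetches (or registers) the shared InvertedIndex instance
        try {
            work.run(opHelper.getInvIndex());
        } finally {
            opHelper.deinit(); // closes the inverted-index file in the buffer cache
        }
    }

    public interface InvIndexWork {
        void run(InvertedIndex invIndex) throws HyracksDataException;
    }
}
```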
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/FixedSizeElementInvertedListBuilder.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/FixedSizeElementInvertedListBuilder.java
new file mode 100644
index 0000000..03fc9a1
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/FixedSizeElementInvertedListBuilder.java
@@ -0,0 +1,82 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.impls;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListBuilder;
+
+public class FixedSizeElementInvertedListBuilder implements IInvertedListBuilder {
+ private final int listElementSize;
+ private int listSize = 0;
+
+ private byte[] targetBuf;
+ private int pos;
+
+ public FixedSizeElementInvertedListBuilder(ITypeTrait[] invListFields) {
+ int tmp = 0;
+ for (int i = 0; i < invListFields.length; i++) {
+ tmp += invListFields[i].getStaticallyKnownDataLength();
+ }
+ listElementSize = tmp;
+ }
+
+ @Override
+ public boolean startNewList(ITupleReference tuple, int tokenField) {
+ if (pos + listElementSize > targetBuf.length)
+ return false;
+ else {
+ listSize = 0;
+ return true;
+ }
+ }
+
+ @Override
+ public boolean appendElement(ITupleReference tuple, int numTokenFields, int numElementFields) {
+ if (pos + listElementSize > targetBuf.length)
+ return false;
+
+ // copy each element field at its own offset so multi-field elements do not overwrite each other
+ int targetOff = pos;
+ for (int i = 0; i < numElementFields; i++) {
+ int field = numTokenFields + i;
+ System.arraycopy(tuple.getFieldData(field), tuple.getFieldStart(field), targetBuf, targetOff,
+ tuple.getFieldLength(field));
+ targetOff += tuple.getFieldLength(field);
+ }
+
+ listSize++;
+ pos += listElementSize;
+
+ return true;
+ }
+
+ @Override
+ public void setTargetBuffer(byte[] targetBuf, int startPos) {
+ this.targetBuf = targetBuf;
+ this.pos = startPos;
+ }
+
+ @Override
+ public int getListSize() {
+ return listSize;
+ }
+
+ @Override
+ public int getPos() {
+ return pos;
+ }
+}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/FixedSizeElementInvertedListCursor.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/FixedSizeElementInvertedListCursor.java
new file mode 100644
index 0000000..f7ef56e
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/FixedSizeElementInvertedListCursor.java
@@ -0,0 +1,280 @@
+package edu.uci.ics.hyracks.storage.am.invertedindex.impls;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListCursor;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
+
+public class FixedSizeElementInvertedListCursor implements IInvertedListCursor {
+
+ private final IBufferCache bufferCache;
+ private final int fileId;
+ private final int elementSize;
+ private int currentElementIx;
+ private int currentOff;
+ private int currentPageIx;
+
+ private int startPageId;
+ private int endPageId;
+ private int startOff;
+ private int numElements;
+
+ private final FixedSizeTupleReference tuple;
+ private ICachedPage[] pages = new ICachedPage[10];
+ private int[] elementIndexes = new int[10];
+
+ public FixedSizeElementInvertedListCursor(IBufferCache bufferCache, int fileId, ITypeTrait[] invListFields) {
+ this.bufferCache = bufferCache;
+ this.fileId = fileId;
+ this.currentElementIx = 0;
+ this.currentPageIx = 0;
+
+ int tmp = 0;
+ for (int i = 0; i < invListFields.length; i++) {
+ tmp += invListFields[i].getStaticallyKnownDataLength();
+ }
+ elementSize = tmp;
+ this.currentOff = -elementSize;
+ this.tuple = new FixedSizeTupleReference(invListFields);
+ }
+
+ @Override
+ public boolean hasNext() {
+ if (currentElementIx < numElements)
+ return true;
+ else
+ return false;
+ }
+
+ @Override
+ public void next() {
+ if (currentOff + elementSize >= bufferCache.getPageSize()) {
+ currentPageIx++;
+ currentOff = 0;
+ } else {
+ currentOff += elementSize;
+ }
+
+ currentElementIx++;
+ tuple.reset(pages[currentPageIx].getBuffer().array(), currentOff);
+ }
+
+ @Override
+ public void pinPagesAsync() {
+ // TODO: implement
+ }
+
+ @Override
+ public void pinPagesSync() throws HyracksDataException {
+ int pix = 0;
+ for (int i = startPageId; i <= endPageId; i++) {
+ pages[pix] = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, i), false);
+ pages[pix].acquireReadLatch();
+ pix++;
+ }
+ }
+
+ @Override
+ public void unpinPages() throws HyracksDataException {
+ int numPages = endPageId - startPageId + 1;
+ for (int i = 0; i < numPages; i++) {
+ pages[i].releaseReadLatch();
+ bufferCache.unpin(pages[i]);
+ }
+ }
+
+ @Override
+ public void positionCursor(int elementIx) {
+ int numPages = endPageId - startPageId + 1;
+
+ currentPageIx = binarySearch(elementIndexes, 0, numPages, elementIx);
+ if (currentPageIx < 0) {
+ throw new IndexOutOfBoundsException("Requested index: " + elementIx + " from array with numElements: "
+ + numElements);
+ }
+
+ if (currentPageIx == 0) {
+ currentOff = startOff + elementIx * elementSize;
+ } else {
+ int relativeElementIx = elementIx - elementIndexes[currentPageIx - 1] - 1;
+ currentOff = relativeElementIx * elementSize;
+ }
+
+ currentElementIx = elementIx;
+ tuple.reset(pages[currentPageIx].getBuffer().array(), currentOff);
+ }
+
+ @Override
+ public boolean containsKey(ITupleReference searchTuple, MultiComparator invListCmp) {
+ int mid;
+ int begin = 0;
+ int end = numElements - 1;
+
+ while (begin <= end) {
+ mid = (begin + end) / 2;
+ positionCursor(mid);
+ int cmp = invListCmp.compare(searchTuple, tuple);
+ if (cmp < 0) {
+ end = mid - 1;
+ } else if (cmp > 0) {
+ begin = mid + 1;
+ } else {
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ @Override
+ public void reset(int startPageId, int endPageId, int startOff, int numElements) {
+ this.startPageId = startPageId;
+ this.endPageId = endPageId;
+ this.startOff = startOff;
+ this.numElements = numElements;
+ this.currentElementIx = 0;
+ this.currentPageIx = 0;
+ this.currentOff = startOff - elementSize;
+
+ int numPages = endPageId - startPageId + 1;
+ if (numPages > pages.length) {
+ pages = new ICachedPage[endPageId - startPageId + 1];
+ elementIndexes = new int[endPageId - startPageId + 1];
+ }
+
+ // fill elementIndexes
+ // first page
+ int cumulElements = (bufferCache.getPageSize() - startOff) / elementSize;
+ elementIndexes[0] = cumulElements - 1;
+
+ // middle, full pages
+ for (int i = 1; i < numPages - 1; i++) {
+ elementIndexes[i] = elementIndexes[i - 1] + (bufferCache.getPageSize() / elementSize);
+ }
+
+ // last page
+ elementIndexes[numPages - 1] = numElements - 1;
+ }
+
+ @Override
+ public String printInvList(ISerializerDeserializer[] serdes) throws HyracksDataException {
+ int oldCurrentOff = currentOff;
+ int oldCurrentPageId = currentPageIx;
+ int oldCurrentElementIx = currentElementIx;
+
+ currentOff = startOff - elementSize;
+ currentPageIx = 0;
+ currentElementIx = 0;
+
+ StringBuilder strBuilder = new StringBuilder();
+
+ int count = 0;
+ while (hasNext()) {
+ next();
+ count++;
+ for (int i = 0; i < tuple.getFieldCount(); i++) {
+ ByteArrayInputStream inStream = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
+ tuple.getFieldLength(i));
+ DataInput dataIn = new DataInputStream(inStream);
+ Object o = serdes[i].deserialize(dataIn);
+ strBuilder.append(o.toString());
+ if (i + 1 < tuple.getFieldCount())
+ strBuilder.append(",");
+ }
+ strBuilder.append(" ");
+ }
+
+ // reset previous state
+ currentOff = oldCurrentOff;
+ currentPageIx = oldCurrentPageId;
+ currentElementIx = oldCurrentElementIx;
+
+ return strBuilder.toString();
+ }
+
+ public String printCurrentElement(ISerializerDeserializer[] serdes) throws HyracksDataException {
+ StringBuilder strBuilder = new StringBuilder();
+ for (int i = 0; i < tuple.getFieldCount(); i++) {
+ ByteArrayInputStream inStream = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
+ tuple.getFieldLength(i));
+ DataInput dataIn = new DataInputStream(inStream);
+ Object o = serdes[i].deserialize(dataIn);
+ strBuilder.append(o.toString());
+ if (i + 1 < tuple.getFieldCount())
+ strBuilder.append(",");
+ }
+ return strBuilder.toString();
+ }
+
+ private int binarySearch(int[] arr, int arrStart, int arrLength, int key) {
+ int mid;
+ int begin = arrStart;
+ int end = arrStart + arrLength - 1;
+
+ while (begin <= end) {
+ mid = (begin + end) / 2;
+ int cmp = (key - arr[mid]);
+ if (cmp < 0) {
+ end = mid - 1;
+ } else if (cmp > 0) {
+ begin = mid + 1;
+ } else {
+ return mid;
+ }
+ }
+
+ if (begin > arrStart + arrLength - 1)
+ return -1;
+ if (key < arr[begin])
+ return begin;
+ else
+ return -1;
+ }
+
+ @Override
+ public int compareTo(IInvertedListCursor invListCursor) {
+ return numElements - invListCursor.getNumElements();
+ }
+
+ @Override
+ public int getEndPageId() {
+ return endPageId;
+ }
+
+ @Override
+ public int getNumElements() {
+ return numElements;
+ }
+
+ @Override
+ public int getStartOff() {
+ return startOff;
+ }
+
+ @Override
+ public int getStartPageId() {
+ return startPageId;
+ }
+
+ public int getOffset() {
+ return currentOff;
+ }
+
+ public ICachedPage getPage() {
+ return pages[currentPageIx];
+ }
+
+ @Override
+ public ITupleReference getTuple() {
+ return tuple;
+ }
+}
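For orientation, a standalone illustration (plain Java, with made-up page and element sizes) of the cumulative elementIndexes arithmetic used by reset() and positionCursor() above: elementIndexes[p] holds the global index of the last element stored on page p of the inverted list, and the offset within a page follows from the element's index relative to that page.

```java
public class InvListPagingExample {
    public static void main(String[] args) {
        int pageSize = 4096;      // stand-in for bufferCache.getPageSize()
        int elementSize = 8;      // stand-in for the fixed element size
        int startOff = 4000;      // list starts near the end of its first page
        int numElements = 1000;
        int numPages = 3;         // with these numbers the list spans three pages

        int[] elementIndexes = new int[numPages];
        elementIndexes[0] = (pageSize - startOff) / elementSize - 1;             // last element on first page
        for (int i = 1; i < numPages - 1; i++) {
            elementIndexes[i] = elementIndexes[i - 1] + pageSize / elementSize;  // full middle pages
        }
        elementIndexes[numPages - 1] = numElements - 1;                          // last page

        // positionCursor(elementIx)-style lookup: find the page, then the offset within that page
        int elementIx = 500;
        int page = 0;
        while (elementIndexes[page] < elementIx) {
            page++;
        }
        int off = (page == 0)
                ? startOff + elementIx * elementSize
                : (elementIx - elementIndexes[page - 1] - 1) * elementSize;
        System.out.println("element " + elementIx + " -> page " + page + ", offset " + off);
    }
}
```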
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/FixedSizeFrameTupleAccessor.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/FixedSizeFrameTupleAccessor.java
new file mode 100644
index 0000000..9858eb0
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/FixedSizeFrameTupleAccessor.java
@@ -0,0 +1,98 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.impls;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.comm.FrameHelper;
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+
+public class FixedSizeFrameTupleAccessor implements IFrameTupleAccessor {
+
+ private final int frameSize;
+ private ByteBuffer buffer;
+
+ private final ITypeTrait[] fields;
+ private final int[] fieldStartOffsets;
+ private final int tupleSize;
+
+ public FixedSizeFrameTupleAccessor(int frameSize, ITypeTrait[] fields) {
+ this.frameSize = frameSize;
+ this.fields = fields;
+ this.fieldStartOffsets = new int[fields.length];
+ this.fieldStartOffsets[0] = 0;
+ for (int i = 1; i < fields.length; i++) {
+ fieldStartOffsets[i] = fieldStartOffsets[i - 1] + fields[i - 1].getStaticallyKnownDataLength();
+ }
+
+ int tmp = 0;
+ for (int i = 0; i < fields.length; i++) {
+ tmp += fields[i].getStaticallyKnownDataLength();
+ }
+ tupleSize = tmp;
+ }
+
+ @Override
+ public ByteBuffer getBuffer() {
+ return buffer;
+ }
+
+ @Override
+ public int getFieldCount() {
+ return fields.length;
+ }
+
+ @Override
+ public int getFieldEndOffset(int tupleIndex, int fIdx) {
+ return getTupleStartOffset(tupleIndex) + fieldStartOffsets[fIdx] + fields[fIdx].getStaticallyKnownDataLength();
+ }
+
+ @Override
+ public int getFieldLength(int tupleIndex, int fIdx) {
+ return fields[fIdx].getStaticallyKnownDataLength();
+ }
+
+ @Override
+ public int getFieldSlotsLength() {
+ return 0;
+ }
+
+ @Override
+ public int getFieldStartOffset(int tupleIndex, int fIdx) {
+ return tupleIndex * tupleSize + fieldStartOffsets[fIdx];
+ }
+
+ @Override
+ public int getTupleCount() {
+ return buffer.getInt(FrameHelper.getTupleCountOffset(frameSize));
+ }
+
+ @Override
+ public int getTupleEndOffset(int tupleIndex) {
+ return getFieldEndOffset(tupleIndex, fields.length - 1);
+ }
+
+ @Override
+ public int getTupleStartOffset(int tupleIndex) {
+ return tupleIndex * tupleSize;
+ }
+
+ @Override
+ public void reset(ByteBuffer buffer) {
+ this.buffer = buffer;
+ }
+}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/FixedSizeFrameTupleAppender.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/FixedSizeFrameTupleAppender.java
new file mode 100644
index 0000000..edc2304
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/FixedSizeFrameTupleAppender.java
@@ -0,0 +1,127 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.impls;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.comm.FrameHelper;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+
+public class FixedSizeFrameTupleAppender {
+
+ private static final int TUPLE_COUNT_SIZE = 4;
+ private final int frameSize;
+ private final int tupleSize;
+ private ByteBuffer buffer;
+ private int tupleCount;
+ private int tupleDataEndOffset;
+
+ public FixedSizeFrameTupleAppender(int frameSize, ITypeTrait[] fields) {
+ this.frameSize = frameSize;
+ int tmp = 0;
+ for (int i = 0; i < fields.length; i++) {
+ tmp += fields[i].getStaticallyKnownDataLength();
+ }
+ tupleSize = tmp;
+ }
+
+ public void reset(ByteBuffer buffer, boolean clear) {
+ this.buffer = buffer;
+ if (clear) {
+ buffer.putInt(FrameHelper.getTupleCountOffset(frameSize), 0);
+ tupleCount = 0;
+ tupleDataEndOffset = 0;
+ }
+ }
+
+ public boolean append(byte[] bytes, int offset) {
+ if (tupleDataEndOffset + tupleSize + TUPLE_COUNT_SIZE <= frameSize) {
+ System.arraycopy(bytes, offset, buffer.array(), tupleDataEndOffset, tupleSize);
+ tupleDataEndOffset += tupleSize;
+ tupleCount++;
+ return true;
+ }
+ return false;
+ }
+
+ public boolean append(byte[] bytes, int offset, int length) {
+ if (tupleDataEndOffset + length + TUPLE_COUNT_SIZE <= frameSize) {
+ System.arraycopy(bytes, offset, buffer.array(), tupleDataEndOffset, length);
+ tupleDataEndOffset += length;
+ return true;
+ }
+ return false;
+ }
+
+ public boolean append(int fieldValue) {
+ if (tupleDataEndOffset + 4 + TUPLE_COUNT_SIZE <= frameSize) {
+ buffer.putInt(tupleDataEndOffset, fieldValue);
+ tupleDataEndOffset += 4;
+ tupleCount++;
+ return true;
+ }
+ return false;
+ }
+
+ public boolean append(long fieldValue) {
+ if (tupleDataEndOffset + 8 + TUPLE_COUNT_SIZE <= frameSize) {
+ buffer.putLong(tupleDataEndOffset, fieldValue);
+ tupleDataEndOffset += 8;
+ tupleCount++;
+ return true;
+ }
+ return false;
+ }
+
+ public boolean append(char fieldValue) {
+ if (tupleDataEndOffset + 2 + TUPLE_COUNT_SIZE <= frameSize) {
+ buffer.putChar(tupleDataEndOffset, fieldValue);
+ tupleDataEndOffset += 2;
+ tupleCount++;
+ return true;
+ }
+ return false;
+ }
+
+ public boolean append(byte fieldValue) {
+ if (tupleDataEndOffset + 1 + TUPLE_COUNT_SIZE <= frameSize) {
+ buffer.put(tupleDataEndOffset, fieldValue);
+ tupleDataEndOffset += 1;
+ tupleCount++;
+ return true;
+ }
+ return false;
+ }
+
+ // returns true if an entire tuple fits
+ // returns false otherwise
+ public boolean hasSpace() {
+ return tupleDataEndOffset + tupleSize + TUPLE_COUNT_SIZE <= frameSize;
+ }
+
+ public void incrementTupleCount(int count) {
+ buffer.putInt(FrameHelper.getTupleCountOffset(frameSize),
+ buffer.getInt(FrameHelper.getTupleCountOffset(frameSize)) + count);
+ }
+
+ public int getTupleCount() {
+ return tupleCount;
+ }
+
+ public ByteBuffer getBuffer() {
+ return buffer;
+ }
+}
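A plain-Java illustration of the frame layout the fixed-size appender/accessor pair above assumes: fixed-size tuples packed from offset 0, with a 4-byte tuple count at FrameHelper.getTupleCountOffset(frameSize), which this sketch assumes to be the last four bytes of the frame. All sizes are made-up example values.

```java
import java.nio.ByteBuffer;

public class FixedSizeFrameLayoutExample {
    public static void main(String[] args) {
        int frameSize = 64;
        int tupleSize = 8;                    // e.g., one int token-id field plus one int key field
        ByteBuffer frame = ByteBuffer.allocate(frameSize);
        int tupleCountOffset = frameSize - 4; // assumed FrameHelper convention

        int dataEnd = 0;
        int count = 0;
        // keep appending (tupleId, tupleId * 10) tuples while an entire tuple still fits
        while (dataEnd + tupleSize + 4 <= frameSize) {
            frame.putInt(dataEnd, count);
            frame.putInt(dataEnd + 4, count * 10);
            dataEnd += tupleSize;
            count++;
        }
        frame.putInt(tupleCountOffset, count);

        // accessor-style read-back: field offsets are pure arithmetic, no per-tuple slot table
        int storedCount = frame.getInt(tupleCountOffset);
        for (int t = 0; t < storedCount; t++) {
            int tupleStart = t * tupleSize;
            System.out.println(frame.getInt(tupleStart) + "," + frame.getInt(tupleStart + 4));
        }
    }
}
```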
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/FixedSizeTupleReference.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/FixedSizeTupleReference.java
new file mode 100644
index 0000000..248b81e
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/FixedSizeTupleReference.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.impls;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+
+public class FixedSizeTupleReference implements ITupleReference {
+
+ private final ITypeTrait[] typeTraits;
+ private final int[] fieldStartOffsets;
+ private byte[] data;
+ private int startOff;
+
+ public FixedSizeTupleReference(ITypeTrait[] typeTraits) {
+ this.typeTraits = typeTraits;
+ this.fieldStartOffsets = new int[typeTraits.length];
+ this.fieldStartOffsets[0] = 0;
+ for (int i = 1; i < typeTraits.length; i++) {
+ fieldStartOffsets[i] = fieldStartOffsets[i - 1] + typeTraits[i - 1].getStaticallyKnownDataLength();
+ }
+ }
+
+ public void reset(byte[] data, int startOff) {
+ this.data = data;
+ this.startOff = startOff;
+ }
+
+ @Override
+ public int getFieldCount() {
+ return typeTraits.length;
+ }
+
+ @Override
+ public byte[] getFieldData(int fIdx) {
+ return data;
+ }
+
+ @Override
+ public int getFieldLength(int fIdx) {
+ return typeTraits[fIdx].getStaticallyKnownDataLength();
+ }
+
+ @Override
+ public int getFieldStart(int fIdx) {
+ return startOff + fieldStartOffsets[fIdx];
+ }
+}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/InvertedIndex.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/InvertedIndex.java
new file mode 100644
index 0000000..9eab110
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/InvertedIndex.java
@@ -0,0 +1,289 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.impls;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ByteArrayAccessibleOutputStream;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListBuilder;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListCursor;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
+
+/**
+ * An inverted index consists of two files:
+ * 1. a file storing the (paginated) inverted lists
+ * 2. a BTree file mapping from tokens to inverted lists
+ * Implemented features: bulk loading and searching (based on T-occurrence).
+ * Not implemented: updates (insert/update/delete).
+ * Limitation: a query cannot exceed the size of a Hyracks frame.
+ */
+public class InvertedIndex {
+
+ private BTree btree;
+ private int rootPageId = 0;
+ private IBufferCache bufferCache;
+ private int fileId;
+ private final MultiComparator invListCmp;
+ private final int numTokenFields;
+ private final int numInvListKeys;
+
+ public InvertedIndex(IBufferCache bufferCache, BTree btree, MultiComparator invListCmp) {
+ this.bufferCache = bufferCache;
+ this.btree = btree;
+ this.invListCmp = invListCmp;
+ this.numTokenFields = btree.getMultiComparator().getKeyFieldCount();
+ this.numInvListKeys = invListCmp.getKeyFieldCount();
+ }
+
+ public void open(int fileId) {
+ this.fileId = fileId;
+ }
+
+ public void close() {
+ this.fileId = -1;
+ }
+
+ public BulkLoadContext beginBulkLoad(IInvertedListBuilder invListBuilder, int hyracksFrameSize,
+ float btreeFillFactor) throws HyracksDataException {
+ BulkLoadContext ctx = new BulkLoadContext(invListBuilder, hyracksFrameSize, btreeFillFactor);
+ ctx.init(rootPageId, fileId);
+ return ctx;
+ }
+
+ // ASSUMPTIONS:
+ // the first btree.getMultiComparator().getKeyFieldCount() fields in tuple
+ // are btree keys (e.g., a string token)
+ // the next invListCmp.getKeyFieldCount() fields in tuple are keys of the
+ // inverted list (e.g., primary key)
+ // key fields of inverted list are fixed size
+ public void bulkLoadAddTuple(BulkLoadContext ctx, ITupleReference tuple) throws HyracksDataException {
+
+ // first inverted list, copy token to baaos and start new list
+ if (ctx.currentInvListTokenBaaos.size() == 0) {
+ ctx.currentInvListStartPageId = ctx.currentPageId;
+ ctx.currentInvListStartOffset = ctx.invListBuilder.getPos();
+
+ // remember current token
+ ctx.currentInvListTokenBaaos.reset();
+ for (int i = 0; i < numTokenFields; i++) {
+ ctx.currentInvListTokenBaaos.write(tuple.getFieldData(i), tuple.getFieldStart(i),
+ tuple.getFieldLength(i));
+ }
+
+ if (!ctx.invListBuilder.startNewList(tuple, numTokenFields)) {
+ ctx.pinNextPage();
+ ctx.invListBuilder.setTargetBuffer(ctx.currentPage.getBuffer().array(), 0);
+ if (!ctx.invListBuilder.startNewList(tuple, numTokenFields)) {
+ throw new IllegalStateException("Failed to create first inverted list.");
+ }
+ }
+ }
+
+ // create new inverted list?
+ ctx.currentInvListToken.reset(ctx.currentInvListTokenBaaos.getByteArray(), 0);
+ if (ctx.tokenCmp.compare(tuple, ctx.currentInvListToken) != 0) {
+
+ // create entry in btree for last inverted list
+ createAndInsertBTreeTuple(ctx);
+
+ // remember new token
+ ctx.currentInvListTokenBaaos.reset();
+ for (int i = 0; i < numTokenFields; i++) {
+ ctx.currentInvListTokenBaaos.write(tuple.getFieldData(i), tuple.getFieldStart(i),
+ tuple.getFieldLength(i));
+ }
+
+ // start new list
+ if (!ctx.invListBuilder.startNewList(tuple, numTokenFields)) {
+ ctx.pinNextPage();
+ ctx.invListBuilder.setTargetBuffer(ctx.currentPage.getBuffer().array(), 0);
+ if (!ctx.invListBuilder.startNewList(tuple, numTokenFields)) {
+ throw new IllegalStateException("Failed to start new inverted list after switching to a new page.");
+ }
+ }
+
+ ctx.currentInvListStartPageId = ctx.currentPageId;
+ ctx.currentInvListStartOffset = ctx.invListBuilder.getPos();
+ }
+
+ // append to current inverted list
+ if (!ctx.invListBuilder.appendElement(tuple, numTokenFields, numInvListKeys)) {
+ ctx.pinNextPage();
+ ctx.invListBuilder.setTargetBuffer(ctx.currentPage.getBuffer().array(), 0);
+ if (!ctx.invListBuilder.appendElement(tuple, numTokenFields, numInvListKeys)) {
+ throw new IllegalStateException(
+ "Failed to append element to inverted list after switching to a new page.");
+ }
+ }
+ }
+
+ public boolean openCursor(ITreeIndexCursor btreeCursor, RangePredicate btreePred, BTreeOpContext btreeOpCtx,
+ IInvertedListCursor invListCursor) throws Exception {
+ btree.search(btreeCursor, btreePred, btreeOpCtx);
+
+ boolean ret = false;
+ if (btreeCursor.hasNext()) {
+
+ btreeCursor.next();
+ ITupleReference frameTuple = btreeCursor.getTuple();
+
+ // hardcoded mapping of btree fields
+ int startPageId = IntegerSerializerDeserializer.getInt(frameTuple.getFieldData(1),
+ frameTuple.getFieldStart(1));
+ int endPageId = IntegerSerializerDeserializer.getInt(frameTuple.getFieldData(2),
+ frameTuple.getFieldStart(2));
+ int startOff = IntegerSerializerDeserializer
+ .getInt(frameTuple.getFieldData(3), frameTuple.getFieldStart(3));
+ int numElements = IntegerSerializerDeserializer.getInt(frameTuple.getFieldData(4),
+ frameTuple.getFieldStart(4));
+
+ invListCursor.reset(startPageId, endPageId, startOff, numElements);
+ ret = true;
+ } else {
+ invListCursor.reset(0, 0, 0, 0);
+ }
+
+ btreeCursor.close();
+ btreeCursor.reset();
+
+ return ret;
+ }
+
+ public void createAndInsertBTreeTuple(BulkLoadContext ctx) throws HyracksDataException {
+ // build tuple
+ ctx.btreeTupleBuilder.reset();
+ ctx.btreeTupleBuilder.addField(ctx.currentInvListTokenBaaos.getByteArray(), 0,
+ ctx.currentInvListTokenBaaos.size());
+ ctx.btreeTupleBuilder.addField(IntegerSerializerDeserializer.INSTANCE, ctx.currentInvListStartPageId);
+ ctx.btreeTupleBuilder.addField(IntegerSerializerDeserializer.INSTANCE, ctx.currentPageId);
+ ctx.btreeTupleBuilder.addField(IntegerSerializerDeserializer.INSTANCE, ctx.currentInvListStartOffset);
+ ctx.btreeTupleBuilder.addField(IntegerSerializerDeserializer.INSTANCE, ctx.invListBuilder.getListSize());
+
+ // append to buffer
+ ctx.btreeTupleAppender.reset(ctx.btreeTupleBuffer, true);
+ ctx.btreeTupleAppender.append(ctx.btreeTupleBuilder.getFieldEndOffsets(), ctx.btreeTupleBuilder.getByteArray(),
+ 0, ctx.btreeTupleBuilder.getSize());
+
+ // reset tuple reference
+ ctx.btreeFrameTupleReference.reset(ctx.btreeFrameTupleAccessor, 0);
+
+ btree.bulkLoadAddTuple(ctx.btreeBulkLoadCtx, ctx.btreeFrameTupleReference);
+ }
+
+ public void endBulkLoad(BulkLoadContext ctx) throws HyracksDataException {
+ // create entry in btree for last inverted list
+ createAndInsertBTreeTuple(ctx);
+ btree.endBulkLoad(ctx.btreeBulkLoadCtx);
+ ctx.deinit();
+ }
+
+ public IBufferCache getBufferCache() {
+ return bufferCache;
+ }
+
+ public int getInvListsFileId() {
+ return fileId;
+ }
+
+ public MultiComparator getInvListElementCmp() {
+ return invListCmp;
+ }
+
+ public BTree getBTree() {
+ return btree;
+ }
+
+ public final class BulkLoadContext {
+ private final ByteBuffer btreeTupleBuffer;
+ private final ArrayTupleBuilder btreeTupleBuilder;
+ private final FrameTupleAppender btreeTupleAppender;
+ private final FrameTupleAccessor btreeFrameTupleAccessor;
+ private final FrameTupleReference btreeFrameTupleReference = new FrameTupleReference();
+ private final float btreeFillFactor;
+ private IIndexBulkLoadContext btreeBulkLoadCtx;
+
+ private int currentInvListStartPageId;
+ private int currentInvListStartOffset;
+ private final ByteArrayAccessibleOutputStream currentInvListTokenBaaos = new ByteArrayAccessibleOutputStream();
+ private final FixedSizeTupleReference currentInvListToken = new FixedSizeTupleReference(
+ invListCmp.getTypeTraits());
+
+ private int currentPageId;
+ private ICachedPage currentPage;
+ private final IInvertedListBuilder invListBuilder;
+ private final MultiComparator tokenCmp;
+
+ public BulkLoadContext(IInvertedListBuilder invListBuilder, int hyracksFrameSize, float btreeFillFactor) {
+ this.invListBuilder = invListBuilder;
+ this.tokenCmp = btree.getMultiComparator();
+ this.btreeTupleBuffer = ByteBuffer.allocate(hyracksFrameSize);
+ this.btreeTupleBuilder = new ArrayTupleBuilder(tokenCmp.getFieldCount());
+ this.btreeTupleAppender = new FrameTupleAppender(hyracksFrameSize);
+ // TODO: serde never used, only need correct number of fields
+ // tuple contains (token, start page, end page, start offset, num
+ // elements)
+ RecordDescriptor recDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE });
+ this.btreeFrameTupleAccessor = new FrameTupleAccessor(hyracksFrameSize, recDesc);
+ this.btreeFillFactor = btreeFillFactor;
+ }
+
+ public void init(int startPageId, int fileId) throws HyracksDataException {
+ btreeBulkLoadCtx = btree.beginBulkLoad(btreeFillFactor,
+ btree.getLeafFrameFactory().createFrame(), btree.getInteriorFrameFactory().createFrame(), btree
+ .getFreePageManager().getMetaDataFrameFactory().createFrame());
+ currentPageId = startPageId;
+ currentPage = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, currentPageId), true);
+ currentPage.acquireWriteLatch();
+ invListBuilder.setTargetBuffer(currentPage.getBuffer().array(), 0);
+ btreeFrameTupleAccessor.reset(btreeTupleBuffer);
+ }
+
+ public void deinit() throws HyracksDataException {
+ if (currentPage != null) {
+ currentPage.releaseWriteLatch();
+ bufferCache.unpin(currentPage);
+ }
+ }
+
+ public void pinNextPage() throws HyracksDataException {
+ currentPage.releaseWriteLatch();
+ bufferCache.unpin(currentPage);
+ currentPageId++;
+ currentPage = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, currentPageId), true);
+ currentPage.acquireWriteLatch();
+ }
+ };
+}
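A sketch of the bulk-load contract spelled out in the ASSUMPTIONS comment above: the caller feeds tuples of the form (token fields..., fixed-size inverted-list key fields...), sorted primarily by token, and the index takes care of list/page switching and of writing one BTree entry (token, start page, end page, start offset, number of elements) per list. The wrapper class and method are invented for illustration; invIndex, invListBuilder, frameSize, and the fill factor are assumed to be provided by the caller.

```java
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListBuilder;
import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndex;

public class InvertedIndexBulkLoadSketch {
    public static void bulkLoad(InvertedIndex invIndex, IInvertedListBuilder invListBuilder,
            Iterable<ITupleReference> sortedTuples, int frameSize, float btreeFillFactor)
            throws HyracksDataException {
        InvertedIndex.BulkLoadContext ctx = invIndex.beginBulkLoad(invListBuilder, frameSize, btreeFillFactor);
        for (ITupleReference tuple : sortedTuples) {
            invIndex.bulkLoadAddTuple(ctx, tuple); // starts a new inverted list whenever the token changes
        }
        invIndex.endBulkLoad(ctx); // writes the last list's BTree entry and unpins the last page
    }
}
```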
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IBinaryTokenizerFactory.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/InvertedIndexException.java
similarity index 72%
copy from hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IBinaryTokenizerFactory.java
copy to hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/InvertedIndexException.java
index 7e91fd4..494ac0c 100644
--- a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/api/IBinaryTokenizerFactory.java
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/InvertedIndexException.java
@@ -13,10 +13,12 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.invertedindex.api;
+package edu.uci.ics.hyracks.storage.am.invertedindex.impls;
-import java.io.Serializable;
+public class InvertedIndexException extends Exception {
+ private static final long serialVersionUID = 1L;
-public interface IBinaryTokenizerFactory extends Serializable {
- public IBinaryTokenizer createBinaryTokenizer();
+ public InvertedIndexException(String msg) {
+ super(msg);
+ }
}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/ListResultCursor.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/ListResultCursor.java
deleted file mode 100644
index adc5221..0000000
--- a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/ListResultCursor.java
+++ /dev/null
@@ -1,57 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.invertedindex.impls;
-
-import java.nio.ByteBuffer;
-import java.util.List;
-
-import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexResultCursor;
-
-public class ListResultCursor implements IInvertedIndexResultCursor {
-
- private List<ByteBuffer> resultBuffers;
- private int numResultBuffers;
- private int currentPos = 0;
-
- public void setResults(List<ByteBuffer> resultBuffers, int numResultBuffers) {
- this.resultBuffers = resultBuffers;
- this.numResultBuffers = numResultBuffers;
- reset();
- }
-
- @Override
- public boolean hasNext() {
- if (currentPos + 1 < numResultBuffers)
- return true;
- else
- return false;
- }
-
- @Override
- public void next() {
- currentPos++;
- }
-
- @Override
- public ByteBuffer getBuffer() {
- return resultBuffers.get(currentPos);
- }
-
- @Override
- public void reset() {
- currentPos = -1;
- }
-}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeLeafFrameFactory.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/OccurrenceThresholdPanicException.java
similarity index 69%
copy from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeLeafFrameFactory.java
copy to hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/OccurrenceThresholdPanicException.java
index 50733de..b0e737c 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeLeafFrameFactory.java
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/OccurrenceThresholdPanicException.java
@@ -13,10 +13,12 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.api;
+package edu.uci.ics.hyracks.storage.am.invertedindex.impls;
-import java.io.Serializable;
+public class OccurrenceThresholdPanicException extends InvertedIndexException {
+ private static final long serialVersionUID = 1L;
-public interface IBTreeLeafFrameFactory extends Serializable {
- public IBTreeLeafFrame getFrame();
-}
\ No newline at end of file
+ public OccurrenceThresholdPanicException(String msg) {
+ super(msg);
+ }
+}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/SearchResultCursor.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/SearchResultCursor.java
new file mode 100644
index 0000000..5a7230c
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/SearchResultCursor.java
@@ -0,0 +1,76 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.impls;
+
+import java.nio.ByteBuffer;
+import java.util.List;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexResultCursor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearcher;
+
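+// Cursor over the result frames produced by an IInvertedIndexSearcher; steps through the
+// buffers tuple by tuple and exposes each hit through a reused FixedSizeTupleReference.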
+public class SearchResultCursor implements IInvertedIndexResultCursor {
+
+ private List<ByteBuffer> resultBuffers;
+ private IFrameTupleAccessor fta;
+ private FixedSizeTupleReference resultTuple;
+ private int numResultBuffers;
+ private int currentBufferIndex = 0;
+ private int tupleIndex = 0;
+
+ public SearchResultCursor(IFrameTupleAccessor fta, ITupleReference resultTuple) {
+ this.fta = fta;
+ this.resultTuple = (FixedSizeTupleReference) resultTuple;
+ }
+
+ @Override
+ public boolean hasNext() {
+        return currentBufferIndex < numResultBuffers && tupleIndex < fta.getTupleCount();
+ }
+
+ @Override
+ public void next() {
+ resultTuple.reset(fta.getBuffer().array(), fta.getTupleStartOffset(tupleIndex));
+ tupleIndex++;
+ if (tupleIndex >= fta.getTupleCount()) {
+ if (currentBufferIndex + 1 < numResultBuffers) {
+ currentBufferIndex++;
+ fta.reset(resultBuffers.get(currentBufferIndex));
+ tupleIndex = 0;
+ }
+ }
+ }
+
+ @Override
+ public ITupleReference getTuple() {
+ return resultTuple;
+ }
+
+ @Override
+ public void reset(IInvertedIndexSearcher invIndexSearcher) {
+ currentBufferIndex = 0;
+ tupleIndex = 0;
+ resultBuffers = invIndexSearcher.getResultBuffers();
+ numResultBuffers = invIndexSearcher.getNumValidResultBuffers();
+ if (numResultBuffers > 0) {
+ fta.reset(resultBuffers.get(0));
+ }
+ }
+}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/SimpleConjunctiveSearcher.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/SimpleConjunctiveSearcher.java
deleted file mode 100644
index 6ad5a9c..0000000
--- a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/SimpleConjunctiveSearcher.java
+++ /dev/null
@@ -1,294 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.invertedindex.impls;
-
-import java.io.DataOutput;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.List;
-
-import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeCursor;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOp;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext;
-import edu.uci.ics.hyracks.storage.am.btree.impls.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangeSearchCursor;
-import edu.uci.ics.hyracks.storage.am.invertedindex.api.IBinaryTokenizer;
-import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexResultCursor;
-import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearcher;
-
-public class SimpleConjunctiveSearcher implements IInvertedIndexSearcher {
-
- private final int numKeyFields;
- private final int numValueFields;
-
- private final IBinaryComparator[] keyCmps;
- private final IBinaryComparator[] valueCmps;
-
- private final BTree btree;
- private final IHyracksStageletContext ctx;
- private final ArrayTupleBuilder resultTupleBuilder;
- private final FrameTupleAppender resultTupleAppender;
- private final FrameTupleAccessor resultFrameAccessor;
-
- private List<ByteBuffer> newResultBuffers = new ArrayList<ByteBuffer>();
- private List<ByteBuffer> prevResultBuffers = new ArrayList<ByteBuffer>();
- private List<ByteBuffer> swap = null;
- private final ListResultCursor resultCursor = new ListResultCursor();
- private int maxResultBufIdx = 0;
-
- private final IBTreeLeafFrame leafFrame;
- private final IBTreeInteriorFrame interiorFrame;
- private final IBTreeCursor btreeCursor;
- private final FrameTupleReference searchKey = new FrameTupleReference();
- private final RangePredicate pred = new RangePredicate(true, null, null, true, true, null, null);
-
- private final IBinaryTokenizer queryTokenizer;
-
- public SimpleConjunctiveSearcher(IHyracksStageletContext ctx, BTree btree, RecordDescriptor btreeRecDesc,
- IBinaryTokenizer queryTokenizer, int numKeyFields, int numValueFields) {
- this.ctx = ctx;
- this.btree = btree;
- this.queryTokenizer = queryTokenizer;
- this.numKeyFields = numKeyFields;
- this.numValueFields = numValueFields;
-
- leafFrame = btree.getLeafFrameFactory().getFrame();
- interiorFrame = btree.getInteriorFrameFactory().getFrame();
- btreeCursor = new RangeSearchCursor(leafFrame);
- resultTupleAppender = new FrameTupleAppender(ctx.getFrameSize());
- resultTupleBuilder = new ArrayTupleBuilder(numValueFields);
- newResultBuffers.add(ctx.allocateFrame());
- prevResultBuffers.add(ctx.allocateFrame());
-
- MultiComparator btreeCmp = btree.getMultiComparator();
-
- keyCmps = new IBinaryComparator[numKeyFields];
- for (int i = 0; i < numKeyFields; i++) {
- keyCmps[i] = btreeCmp.getComparators()[i];
- }
-
- valueCmps = new IBinaryComparator[numValueFields];
- for (int i = 0; i < numValueFields; i++) {
- valueCmps[i] = btreeCmp.getComparators()[numKeyFields + i];
- }
-
- MultiComparator searchCmp = new MultiComparator(btreeCmp.getTypeTraits(), keyCmps);
- pred.setLowKeyComparator(searchCmp);
- pred.setHighKeyComparator(searchCmp);
- pred.setLowKey(searchKey, true);
- pred.setHighKey(searchKey, true);
-
- ISerializerDeserializer[] valueSerde = new ISerializerDeserializer[numValueFields];
- for (int i = 0; i < numValueFields; i++) {
- valueSerde[i] = btreeRecDesc.getFields()[numKeyFields + i];
-
- }
- RecordDescriptor valueRecDesc = new RecordDescriptor(valueSerde);
- resultFrameAccessor = new FrameTupleAccessor(ctx.getFrameSize(), valueRecDesc);
- }
-
- public void search(ITupleReference queryTuple, int queryFieldIndex) throws Exception {
-
- // parse query, TODO: this parsing is too simple
- RecordDescriptor queryTokenRecDesc = new RecordDescriptor(
- new ISerializerDeserializer[] { UTF8StringSerializerDeserializer.INSTANCE });
-
- ArrayTupleBuilder queryTokenBuilder = new ArrayTupleBuilder(queryTokenRecDesc.getFields().length);
- DataOutput queryTokenDos = queryTokenBuilder.getDataOutput();
- FrameTupleAppender queryTokenAppender = new FrameTupleAppender(ctx.getFrameSize());
- ByteBuffer queryTokenFrame = ctx.allocateFrame();
- queryTokenAppender.reset(queryTokenFrame, true);
-
- queryTokenizer.reset(queryTuple.getFieldData(queryFieldIndex), queryTuple.getFieldStart(queryFieldIndex),
- queryTuple.getFieldLength(queryFieldIndex));
- while (queryTokenizer.hasNext()) {
- queryTokenizer.next();
-
- queryTokenBuilder.reset();
- try {
- queryTokenizer.writeToken(queryTokenDos);
- queryTokenBuilder.addFieldEndOffset();
- } catch (IOException e) {
- throw new HyracksDataException(e);
- }
-
- // WARNING: assuming one frame is enough to hold all tokens
- queryTokenAppender.append(queryTokenBuilder.getFieldEndOffsets(), queryTokenBuilder.getByteArray(), 0,
- queryTokenBuilder.getSize());
- }
-
- FrameTupleAccessor queryTokenAccessor = new FrameTupleAccessor(ctx.getFrameSize(), queryTokenRecDesc);
- queryTokenAccessor.reset(queryTokenFrame);
- int numQueryTokens = queryTokenAccessor.getTupleCount();
-
- maxResultBufIdx = 0;
-
- BTreeOpContext opCtx = btree.createOpContext(BTreeOp.BTO_SEARCH, leafFrame, interiorFrame, null);
-
- resultTupleAppender.reset(newResultBuffers.get(0), true);
- try {
- // append first inverted list to temporary results
- searchKey.reset(queryTokenAccessor, 0);
- btree.search(btreeCursor, pred, opCtx);
- while (btreeCursor.hasNext()) {
- btreeCursor.next();
- maxResultBufIdx = appendTupleToNewResults(btreeCursor, maxResultBufIdx);
- }
- btreeCursor.close();
- btreeCursor.reset();
- } catch (Exception e) {
- throw new HyracksDataException(e);
- }
-
- resultFrameAccessor.reset(newResultBuffers.get(0));
-
- // intersect temporary results with remaining inverted lists
- for (int i = 1; i < numQueryTokens; i++) {
- swap = prevResultBuffers;
- prevResultBuffers = newResultBuffers;
- newResultBuffers = swap;
- try {
- searchKey.reset(queryTokenAccessor, i);
- btree.search(btreeCursor, pred, opCtx);
- maxResultBufIdx = intersectList(btreeCursor, prevResultBuffers, maxResultBufIdx, newResultBuffers);
- } catch (Exception e) {
- throw new HyracksDataException(e);
- }
- btreeCursor.close();
- btreeCursor.reset();
- }
- }
-
- private int appendTupleToNewResults(IBTreeCursor btreeCursor, int newBufIdx) throws IOException {
- ByteBuffer newCurrentBuffer = newResultBuffers.get(newBufIdx);
-
- ITupleReference tuple = btreeCursor.getTuple();
- resultTupleBuilder.reset();
- DataOutput dos = resultTupleBuilder.getDataOutput();
- for (int i = 0; i < numValueFields; i++) {
- int fIdx = numKeyFields + i;
- dos.write(tuple.getFieldData(fIdx), tuple.getFieldStart(fIdx), tuple.getFieldLength(fIdx));
- resultTupleBuilder.addFieldEndOffset();
- }
-
- if (!resultTupleAppender.append(resultTupleBuilder.getFieldEndOffsets(), resultTupleBuilder.getByteArray(), 0,
- resultTupleBuilder.getSize())) {
- newBufIdx++;
- if (newBufIdx >= newResultBuffers.size()) {
- newResultBuffers.add(ctx.allocateFrame());
- }
- newCurrentBuffer = newResultBuffers.get(newBufIdx);
- resultTupleAppender.reset(newCurrentBuffer, true);
- if (!resultTupleAppender.append(resultTupleBuilder.getFieldEndOffsets(), resultTupleBuilder.getByteArray(),
- 0, resultTupleBuilder.getSize())) {
- throw new IllegalStateException();
- }
- }
-
- return newBufIdx;
- }
-
- private int intersectList(IBTreeCursor btreeCursor, List<ByteBuffer> prevResultBuffers, int maxPrevBufIdx,
- List<ByteBuffer> newResultBuffers) throws IOException, Exception {
-
- int newBufIdx = 0;
- ByteBuffer newCurrentBuffer = newResultBuffers.get(0);
-
- int prevBufIdx = 0;
- ByteBuffer prevCurrentBuffer = prevResultBuffers.get(0);
-
- resultTupleBuilder.reset();
- resultTupleAppender.reset(newCurrentBuffer, true);
- resultFrameAccessor.reset(prevCurrentBuffer);
-
- // WARNING: not very efficient but good enough for the first cut
- boolean advanceCursor = true;
- boolean advancePrevResult = false;
- int resultTidx = 0;
-
- while ((!advanceCursor || btreeCursor.hasNext()) && prevBufIdx <= maxPrevBufIdx
- && resultTidx < resultFrameAccessor.getTupleCount()) {
-
- if (advanceCursor)
- btreeCursor.next();
- ITupleReference tuple = btreeCursor.getTuple();
-
- int cmp = 0;
- for (int i = 0; i < valueCmps.length; i++) {
- int tupleFidx = numKeyFields + i;
- cmp = valueCmps[i].compare(tuple.getFieldData(tupleFidx), tuple.getFieldStart(tupleFidx),
- tuple.getFieldLength(tupleFidx), resultFrameAccessor.getBuffer().array(),
- resultFrameAccessor.getTupleStartOffset(resultTidx) + resultFrameAccessor.getFieldSlotsLength()
- + resultFrameAccessor.getFieldStartOffset(resultTidx, i),
- resultFrameAccessor.getFieldLength(resultTidx, i));
- if (cmp != 0)
- break;
- }
-
- // match found
- if (cmp == 0) {
- newBufIdx = appendTupleToNewResults(btreeCursor, newBufIdx);
-
- advanceCursor = true;
- advancePrevResult = true;
- } else {
- if (cmp < 0) {
- advanceCursor = true;
- advancePrevResult = false;
- } else {
- advanceCursor = false;
- advancePrevResult = true;
- }
- }
-
- if (advancePrevResult) {
- resultTidx++;
- if (resultTidx >= resultFrameAccessor.getTupleCount()) {
- prevBufIdx++;
- if (prevBufIdx <= maxPrevBufIdx) {
- prevCurrentBuffer = prevResultBuffers.get(prevBufIdx);
- resultFrameAccessor.reset(prevCurrentBuffer);
- resultTidx = 0;
- }
- }
- }
- }
-
- return newBufIdx;
- }
-
- @Override
- public IInvertedIndexResultCursor getResultCursor() {
- resultCursor.setResults(newResultBuffers, maxResultBufIdx + 1);
- return resultCursor;
- }
-}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/TOccurrenceSearcher.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/TOccurrenceSearcher.java
new file mode 100644
index 0000000..d1fba3b
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/TOccurrenceSearcher.java
@@ -0,0 +1,543 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.impls;
+
+import java.io.DataOutput;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.TypeTrait;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexResultCursor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifier;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearcher;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListCursor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizer;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IToken;
+
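+// Searcher that merges the inverted lists of the query tokens and counts, for each element,
+// in how many lists it occurs (T-occurrence). The IInvertedIndexSearchModifier supplies the
+// occurrence threshold T and the number of "prefix" lists that are merged exhaustively; the
+// remaining "suffix" lists only update the counts of candidates found in the prefix lists.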
+public class TOccurrenceSearcher implements IInvertedIndexSearcher {
+
+ protected final IHyracksStageletContext ctx;
+ protected final FixedSizeFrameTupleAppender resultFrameTupleApp;
+ protected final FixedSizeFrameTupleAccessor resultFrameTupleAcc;
+ protected final FixedSizeTupleReference resultTuple;
+ protected final int invListKeyLength;
+ protected int currentNumResults;
+
+ protected List<ByteBuffer> newResultBuffers = new ArrayList<ByteBuffer>();
+ protected List<ByteBuffer> prevResultBuffers = new ArrayList<ByteBuffer>();
+ protected List<ByteBuffer> swap = null;
+ protected int maxResultBufIdx = 0;
+
+ protected final ITreeIndexFrame leafFrame;
+ protected final ITreeIndexFrame interiorFrame;
+ protected final ITreeIndexCursor btreeCursor;
+ protected final FrameTupleReference searchKey = new FrameTupleReference();
+ protected final RangePredicate btreePred = new RangePredicate(true, null, null, true, true, null, null);
+ protected final BTreeOpContext btreeOpCtx;
+
+ protected RecordDescriptor queryTokenRecDesc = new RecordDescriptor(
+ new ISerializerDeserializer[] { UTF8StringSerializerDeserializer.INSTANCE });
+ protected ArrayTupleBuilder queryTokenBuilder = new ArrayTupleBuilder(queryTokenRecDesc.getFields().length);
+ protected DataOutput queryTokenDos = queryTokenBuilder.getDataOutput();
+ protected FrameTupleAppender queryTokenAppender;
+ protected ByteBuffer queryTokenFrame;
+
+ protected final InvertedIndex invIndex;
+ protected final IBinaryTokenizer queryTokenizer;
+ protected final ITypeTrait[] invListFieldsWithCount;
+ protected int occurrenceThreshold;
+
+ protected final int cursorCacheSize = 10;
+ protected List<IInvertedListCursor> invListCursorCache = new ArrayList<IInvertedListCursor>(cursorCacheSize);
+ protected List<IInvertedListCursor> invListCursors = new ArrayList<IInvertedListCursor>(cursorCacheSize);
+
+ public TOccurrenceSearcher(IHyracksStageletContext ctx, InvertedIndex invIndex, IBinaryTokenizer queryTokenizer) {
+ this.ctx = ctx;
+ this.invIndex = invIndex;
+ this.queryTokenizer = queryTokenizer;
+
+ leafFrame = invIndex.getBTree().getLeafFrameFactory().createFrame();
+ interiorFrame = invIndex.getBTree().getInteriorFrameFactory().createFrame();
+
+ btreeCursor = new BTreeRangeSearchCursor((IBTreeLeafFrame) leafFrame);
+ ITypeTrait[] invListFields = invIndex.getInvListElementCmp().getTypeTraits();
+ invListFieldsWithCount = new TypeTrait[invListFields.length + 1];
+ int tmp = 0;
+ for (int i = 0; i < invListFields.length; i++) {
+ invListFieldsWithCount[i] = invListFields[i];
+ tmp += invListFields[i].getStaticallyKnownDataLength();
+ }
+ // using an integer for counting occurrences
+ invListFieldsWithCount[invListFields.length] = new TypeTrait(4);
+ invListKeyLength = tmp;
+
+ btreeOpCtx = invIndex.getBTree().createOpContext(IndexOp.SEARCH, leafFrame, interiorFrame, null);
+
+ resultFrameTupleApp = new FixedSizeFrameTupleAppender(ctx.getFrameSize(), invListFieldsWithCount);
+ resultFrameTupleAcc = new FixedSizeFrameTupleAccessor(ctx.getFrameSize(), invListFieldsWithCount);
+ resultTuple = new FixedSizeTupleReference(invListFieldsWithCount);
+ newResultBuffers.add(ctx.allocateFrame());
+ prevResultBuffers.add(ctx.allocateFrame());
+
+ MultiComparator searchCmp = invIndex.getBTree().getMultiComparator();
+ btreePred.setLowKeyComparator(searchCmp);
+ btreePred.setHighKeyComparator(searchCmp);
+ btreePred.setLowKey(searchKey, true);
+ btreePred.setHighKey(searchKey, true);
+
+ // pre-create cursor objects
+ for (int i = 0; i < cursorCacheSize; i++) {
+ invListCursorCache.add(new FixedSizeElementInvertedListCursor(invIndex.getBufferCache(), invIndex
+ .getInvListsFileId(), invIndex.getInvListElementCmp().getTypeTraits()));
+ }
+
+ queryTokenAppender = new FrameTupleAppender(ctx.getFrameSize());
+ queryTokenFrame = ctx.allocateFrame();
+
+ currentNumResults = 0;
+ }
+
+ public void reset() {
+ for (ByteBuffer b : newResultBuffers) {
+ resultFrameTupleApp.reset(b, true);
+ }
+ for (ByteBuffer b : prevResultBuffers) {
+ resultFrameTupleApp.reset(b, true);
+ }
+ currentNumResults = 0;
+ }
+
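+    // Tokenizes the query field, opens one inverted-list cursor per query token, asks the
+    // search modifier for the occurrence threshold and the prefix-list count, and then merges
+    // the prefix and suffix lists into the result buffers.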
+ public void search(IInvertedIndexResultCursor resultCursor, ITupleReference queryTuple, int queryFieldIndex,
+ IInvertedIndexSearchModifier searchModifier) throws Exception {
+
+ queryTokenAppender.reset(queryTokenFrame, true);
+ queryTokenizer.reset(queryTuple.getFieldData(queryFieldIndex), queryTuple.getFieldStart(queryFieldIndex),
+ queryTuple.getFieldLength(queryFieldIndex));
+
+ while (queryTokenizer.hasNext()) {
+ queryTokenizer.next();
+ queryTokenBuilder.reset();
+ try {
+ IToken token = queryTokenizer.getToken();
+ token.serializeToken(queryTokenDos);
+ queryTokenBuilder.addFieldEndOffset();
+ // WARNING: assuming one frame is big enough to hold all tokens
+ queryTokenAppender.append(queryTokenBuilder.getFieldEndOffsets(), queryTokenBuilder.getByteArray(), 0,
+ queryTokenBuilder.getSize());
+ } catch (IOException e) {
+ throw new HyracksDataException(e);
+ }
+ }
+
+ FrameTupleAccessor queryTokenAccessor = new FrameTupleAccessor(ctx.getFrameSize(), queryTokenRecDesc);
+ queryTokenAccessor.reset(queryTokenFrame);
+ int numQueryTokens = queryTokenAccessor.getTupleCount();
+
+ // expand cursor cache if necessary
+ if (numQueryTokens > invListCursorCache.size()) {
+ int diff = numQueryTokens - invListCursorCache.size();
+ for (int i = 0; i < diff; i++) {
+ invListCursorCache.add(new FixedSizeElementInvertedListCursor(invIndex.getBufferCache(), invIndex
+ .getInvListsFileId(), invIndex.getInvListElementCmp().getTypeTraits()));
+ }
+ }
+
+ invListCursors.clear();
+ for (int i = 0; i < numQueryTokens; i++) {
+ searchKey.reset(queryTokenAccessor, i);
+ invIndex.openCursor(btreeCursor, btreePred, btreeOpCtx, invListCursorCache.get(i));
+ invListCursors.add(invListCursorCache.get(i));
+ }
+
+ occurrenceThreshold = searchModifier.getOccurrenceThreshold(invListCursors);
+
+ // TODO: deal with panic cases properly
+ if (occurrenceThreshold <= 0) {
+ throw new OccurrenceThresholdPanicException("Merge Threshold is <= 0. Failing Search.");
+ }
+
+ int numPrefixLists = searchModifier.getPrefixLists(invListCursors);
+ maxResultBufIdx = mergePrefixLists(numPrefixLists, numQueryTokens);
+ maxResultBufIdx = mergeSuffixLists(numPrefixLists, numQueryTokens, maxResultBufIdx);
+
+ resultCursor.reset(this);
+ }
+
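+    // Prefix lists are union-merged: every element of these lists becomes a candidate,
+    // carrying the number of lists it has been seen in so far.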
+ protected int mergePrefixLists(int numPrefixTokens, int numQueryTokens) throws IOException {
+ int maxPrevBufIdx = 0;
+ for (int i = 0; i < numPrefixTokens; i++) {
+ swap = prevResultBuffers;
+ prevResultBuffers = newResultBuffers;
+ newResultBuffers = swap;
+ currentNumResults = 0;
+
+ invListCursors.get(i).pinPagesSync();
+ maxPrevBufIdx = mergePrefixList(invListCursors.get(i), prevResultBuffers, maxPrevBufIdx, newResultBuffers);
+ invListCursors.get(i).unpinPages();
+ }
+ return maxPrevBufIdx;
+ }
+
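+    // Suffix lists can no longer introduce new candidates; each one only updates the counts of
+    // existing candidates, either by probing (binary search) or by a sort-merge scan, chosen
+    // per list below.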
+ protected int mergeSuffixLists(int numPrefixTokens, int numQueryTokens, int maxPrevBufIdx) throws IOException {
+ for (int i = numPrefixTokens; i < numQueryTokens; i++) {
+ swap = prevResultBuffers;
+ prevResultBuffers = newResultBuffers;
+ newResultBuffers = swap;
+
+ invListCursors.get(i).pinPagesSync();
+ int numInvListElements = invListCursors.get(i).getNumElements();
+ // should we binary search the next list or should we sort-merge it?
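+            // rough cost comparison: probing ~ |candidates| * log(|list|), scanning ~ |candidates| + |list|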
+ if (currentNumResults * Math.log(numInvListElements) < currentNumResults + numInvListElements) {
+ maxPrevBufIdx = mergeSuffixListProbe(invListCursors.get(i), prevResultBuffers, maxPrevBufIdx,
+ newResultBuffers, i, numQueryTokens);
+ } else {
+ maxPrevBufIdx = mergeSuffixListScan(invListCursors.get(i), prevResultBuffers, maxPrevBufIdx,
+ newResultBuffers, i, numQueryTokens);
+ }
+ invListCursors.get(i).unpinPages();
+ }
+ return maxPrevBufIdx;
+ }
+
+ protected int mergeSuffixListProbe(IInvertedListCursor invListCursor, List<ByteBuffer> prevResultBuffers,
+ int maxPrevBufIdx, List<ByteBuffer> newResultBuffers, int invListIx, int numQueryTokens) throws IOException {
+
+ int newBufIdx = 0;
+ ByteBuffer newCurrentBuffer = newResultBuffers.get(0);
+
+ int prevBufIdx = 0;
+ ByteBuffer prevCurrentBuffer = prevResultBuffers.get(0);
+
+ int resultTidx = 0;
+
+ currentNumResults = 0;
+
+ MultiComparator invListCmp = invIndex.getInvListElementCmp();
+
+ resultFrameTupleAcc.reset(prevCurrentBuffer);
+ resultFrameTupleApp.reset(newCurrentBuffer, true);
+
+ while (resultTidx < resultFrameTupleAcc.getTupleCount()) {
+
+ resultTuple.reset(prevCurrentBuffer.array(), resultFrameTupleAcc.getTupleStartOffset(resultTidx));
+ int count = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
+ resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
+
+ if (invListCursor.containsKey(resultTuple, invListCmp)) {
+ count++;
+ newBufIdx = appendTupleToNewResults(resultTuple, count, newBufIdx);
+ } else {
+ if (count + numQueryTokens - invListIx > occurrenceThreshold) {
+ newBufIdx = appendTupleToNewResults(resultTuple, count, newBufIdx);
+ }
+ }
+
+ resultTidx++;
+ if (resultTidx >= resultFrameTupleAcc.getTupleCount()) {
+ prevBufIdx++;
+ if (prevBufIdx <= maxPrevBufIdx) {
+ prevCurrentBuffer = prevResultBuffers.get(prevBufIdx);
+ resultFrameTupleAcc.reset(prevCurrentBuffer);
+ resultTidx = 0;
+ }
+ }
+ }
+
+ return newBufIdx;
+ }
+
+ protected int mergeSuffixListScan(IInvertedListCursor invListCursor, List<ByteBuffer> prevResultBuffers,
+ int maxPrevBufIdx, List<ByteBuffer> newResultBuffers, int invListIx, int numQueryTokens) throws IOException {
+ int newBufIdx = 0;
+ ByteBuffer newCurrentBuffer = newResultBuffers.get(0);
+
+ int prevBufIdx = 0;
+ ByteBuffer prevCurrentBuffer = prevResultBuffers.get(0);
+
+ boolean advanceCursor = true;
+ boolean advancePrevResult = false;
+ int resultTidx = 0;
+
+ MultiComparator invListCmp = invIndex.getInvListElementCmp();
+
+ resultFrameTupleAcc.reset(prevCurrentBuffer);
+ resultFrameTupleApp.reset(newCurrentBuffer, true);
+
+ int invListTidx = 0;
+ int invListNumTuples = invListCursor.getNumElements();
+
+ if (invListCursor.hasNext())
+ invListCursor.next();
+
+ while (invListTidx < invListNumTuples && resultTidx < resultFrameTupleAcc.getTupleCount()) {
+
+ ITupleReference invListTuple = invListCursor.getTuple();
+
+ resultTuple.reset(prevCurrentBuffer.array(), resultFrameTupleAcc.getTupleStartOffset(resultTidx));
+
+ int cmp = invListCmp.compare(invListTuple, resultTuple);
+ if (cmp == 0) {
+ int count = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
+ resultTuple.getFieldStart(resultTuple.getFieldCount() - 1)) + 1;
+ newBufIdx = appendTupleToNewResults(resultTuple, count, newBufIdx);
+ advanceCursor = true;
+ advancePrevResult = true;
+ } else {
+ if (cmp < 0) {
+ advanceCursor = true;
+ advancePrevResult = false;
+ } else {
+ int count = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
+ resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
+ if (count + numQueryTokens - invListIx > occurrenceThreshold) {
+ newBufIdx = appendTupleToNewResults(resultTuple, count, newBufIdx);
+ }
+ advanceCursor = false;
+ advancePrevResult = true;
+ }
+ }
+
+ if (advancePrevResult) {
+ resultTidx++;
+ if (resultTidx >= resultFrameTupleAcc.getTupleCount()) {
+ prevBufIdx++;
+ if (prevBufIdx <= maxPrevBufIdx) {
+ prevCurrentBuffer = prevResultBuffers.get(prevBufIdx);
+ resultFrameTupleAcc.reset(prevCurrentBuffer);
+ resultTidx = 0;
+ }
+ }
+ }
+
+ if (advanceCursor) {
+ invListTidx++;
+ invListCursor.next();
+ }
+ }
+
+ // append remaining elements from previous result set
+ while (resultTidx < resultFrameTupleAcc.getTupleCount()) {
+
+ resultTuple.reset(prevCurrentBuffer.array(), resultFrameTupleAcc.getTupleStartOffset(resultTidx));
+
+ int count = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
+ resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
+ if (count + numQueryTokens - invListIx > occurrenceThreshold) {
+ newBufIdx = appendTupleToNewResults(resultTuple, count, newBufIdx);
+ }
+
+ resultTidx++;
+ if (resultTidx >= resultFrameTupleAcc.getTupleCount()) {
+ prevBufIdx++;
+ if (prevBufIdx <= maxPrevBufIdx) {
+ prevCurrentBuffer = prevResultBuffers.get(prevBufIdx);
+ resultFrameTupleAcc.reset(prevCurrentBuffer);
+ resultTidx = 0;
+ }
+ }
+ }
+
+ return newBufIdx;
+ }
+
+ protected int mergePrefixList(IInvertedListCursor invListCursor, List<ByteBuffer> prevResultBuffers,
+ int maxPrevBufIdx, List<ByteBuffer> newResultBuffers) throws IOException {
+ int newBufIdx = 0;
+ ByteBuffer newCurrentBuffer = newResultBuffers.get(0);
+
+ int prevBufIdx = 0;
+ ByteBuffer prevCurrentBuffer = prevResultBuffers.get(0);
+
+ boolean advanceCursor = true;
+ boolean advancePrevResult = false;
+ int resultTidx = 0;
+
+ MultiComparator invListCmp = invIndex.getInvListElementCmp();
+
+ resultFrameTupleAcc.reset(prevCurrentBuffer);
+ resultFrameTupleApp.reset(newCurrentBuffer, true);
+
+ int invListTidx = 0;
+ int invListNumTuples = invListCursor.getNumElements();
+
+ if (invListCursor.hasNext())
+ invListCursor.next();
+
+ while (invListTidx < invListNumTuples && resultTidx < resultFrameTupleAcc.getTupleCount()) {
+
+ ITupleReference invListTuple = invListCursor.getTuple();
+ resultTuple.reset(prevCurrentBuffer.array(), resultFrameTupleAcc.getTupleStartOffset(resultTidx));
+
+ int cmp = invListCmp.compare(invListTuple, resultTuple);
+ if (cmp == 0) {
+ int count = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
+ resultTuple.getFieldStart(resultTuple.getFieldCount() - 1)) + 1;
+ newBufIdx = appendTupleToNewResults(resultTuple, count, newBufIdx);
+ advanceCursor = true;
+ advancePrevResult = true;
+ } else {
+ if (cmp < 0) {
+ int count = 1;
+ newBufIdx = appendTupleToNewResults(invListTuple, count, newBufIdx);
+ advanceCursor = true;
+ advancePrevResult = false;
+ } else {
+ int count = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
+ resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
+ newBufIdx = appendTupleToNewResults(resultTuple, count, newBufIdx);
+ advanceCursor = false;
+ advancePrevResult = true;
+ }
+ }
+
+ if (advancePrevResult) {
+ resultTidx++;
+ if (resultTidx >= resultFrameTupleAcc.getTupleCount()) {
+ prevBufIdx++;
+ if (prevBufIdx <= maxPrevBufIdx) {
+ prevCurrentBuffer = prevResultBuffers.get(prevBufIdx);
+ resultFrameTupleAcc.reset(prevCurrentBuffer);
+ resultTidx = 0;
+ }
+ }
+ }
+
+ if (advanceCursor) {
+ invListTidx++;
+ invListCursor.next();
+ }
+ }
+
+ // append remaining new elements from inverted list
+ while (invListTidx < invListNumTuples) {
+ ITupleReference invListTuple = invListCursor.getTuple();
+ newBufIdx = appendTupleToNewResults(invListTuple, 1, newBufIdx);
+ invListTidx++;
+ invListCursor.next();
+ }
+
+ // append remaining elements from previous result set
+ while (resultTidx < resultFrameTupleAcc.getTupleCount()) {
+
+ resultTuple.reset(prevCurrentBuffer.array(), resultFrameTupleAcc.getTupleStartOffset(resultTidx));
+
+ int count = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
+ resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
+ newBufIdx = appendTupleToNewResults(resultTuple, count, newBufIdx);
+
+ resultTidx++;
+ if (resultTidx >= resultFrameTupleAcc.getTupleCount()) {
+ prevBufIdx++;
+ if (prevBufIdx <= maxPrevBufIdx) {
+ prevCurrentBuffer = prevResultBuffers.get(prevBufIdx);
+ resultFrameTupleAcc.reset(prevCurrentBuffer);
+ resultTidx = 0;
+ }
+ }
+ }
+
+ return newBufIdx;
+ }
+
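+    // Writes the element key plus its updated occurrence count into the current result frame,
+    // moving to (and allocating, if needed) the next frame when the current one is full.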
+ protected int appendTupleToNewResults(ITupleReference tuple, int newCount, int newBufIdx) throws IOException {
+ ByteBuffer newCurrentBuffer = newResultBuffers.get(newBufIdx);
+
+ if (!resultFrameTupleApp.hasSpace()) {
+ newBufIdx++;
+ if (newBufIdx >= newResultBuffers.size()) {
+ newResultBuffers.add(ctx.allocateFrame());
+ }
+ newCurrentBuffer = newResultBuffers.get(newBufIdx);
+ resultFrameTupleApp.reset(newCurrentBuffer, true);
+ }
+
+ // append key
+ if (!resultFrameTupleApp.append(tuple.getFieldData(0), tuple.getFieldStart(0), invListKeyLength)) {
+ throw new IllegalStateException();
+ }
+
+ // append new count
+ if (!resultFrameTupleApp.append(newCount)) {
+ throw new IllegalStateException();
+ }
+
+ resultFrameTupleApp.incrementTupleCount(1);
+
+ currentNumResults++;
+
+ return newBufIdx;
+ }
+
+ public IFrameTupleAccessor createResultFrameTupleAccessor() {
+ return new FixedSizeFrameTupleAccessor(ctx.getFrameSize(), invListFieldsWithCount);
+ }
+
+ public ITupleReference createResultTupleReference() {
+ return new FixedSizeTupleReference(invListFieldsWithCount);
+ }
+
+ @Override
+ public List<ByteBuffer> getResultBuffers() {
+ return newResultBuffers;
+ }
+
+ @Override
+ public int getNumValidResultBuffers() {
+ return maxResultBufIdx + 1;
+ }
+
+ public int getOccurrenceThreshold() {
+ return occurrenceThreshold;
+ }
+
+ public void printNewResults(int maxResultBufIdx) {
+ StringBuffer strBuffer = new StringBuffer();
+ for (int i = 0; i <= maxResultBufIdx; i++) {
+ ByteBuffer testBuf = newResultBuffers.get(i);
+ resultFrameTupleAcc.reset(testBuf);
+ for (int j = 0; j < resultFrameTupleAcc.getTupleCount(); j++) {
+ strBuffer.append(IntegerSerializerDeserializer.getInt(resultFrameTupleAcc.getBuffer().array(),
+ resultFrameTupleAcc.getFieldStartOffset(j, 0)) + ",");
+ strBuffer.append(IntegerSerializerDeserializer.getInt(resultFrameTupleAcc.getBuffer().array(),
+ resultFrameTupleAcc.getFieldStartOffset(j, 1)) + " ");
+ }
+ }
+ System.out.println(strBuffer.toString());
+ }
+}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/TOccurrenceSearcherSuffixProbeOnly.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/TOccurrenceSearcherSuffixProbeOnly.java
new file mode 100644
index 0000000..30d67f0
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/TOccurrenceSearcherSuffixProbeOnly.java
@@ -0,0 +1,94 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.impls;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.List;
+
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListCursor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizer;
+
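+// Variant that always probes (binary-searches) the suffix lists instead of choosing between
+// probing and scanning; presumably useful for isolating and comparing the two strategies.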
+public class TOccurrenceSearcherSuffixProbeOnly extends TOccurrenceSearcher {
+
+ public TOccurrenceSearcherSuffixProbeOnly(IHyracksStageletContext ctx, InvertedIndex invIndex,
+ IBinaryTokenizer queryTokenizer) {
+ super(ctx, invIndex, queryTokenizer);
+ }
+
+ protected int mergeSuffixLists(int numPrefixTokens, int numQueryTokens, int maxPrevBufIdx) throws IOException {
+ for (int i = numPrefixTokens; i < numQueryTokens; i++) {
+ swap = prevResultBuffers;
+ prevResultBuffers = newResultBuffers;
+ newResultBuffers = swap;
+ currentNumResults = 0;
+
+ invListCursors.get(i).pinPagesSync();
+ maxPrevBufIdx = mergeSuffixListProbe(invListCursors.get(i), prevResultBuffers, maxPrevBufIdx,
+ newResultBuffers, i, numQueryTokens);
+ invListCursors.get(i).unpinPages();
+ }
+ return maxPrevBufIdx;
+ }
+
+ protected int mergeSuffixListProbe(IInvertedListCursor invListCursor, List<ByteBuffer> prevResultBuffers,
+ int maxPrevBufIdx, List<ByteBuffer> newResultBuffers, int invListIx, int numQueryTokens) throws IOException {
+
+ int newBufIdx = 0;
+ ByteBuffer newCurrentBuffer = newResultBuffers.get(0);
+
+ int prevBufIdx = 0;
+ ByteBuffer prevCurrentBuffer = prevResultBuffers.get(0);
+
+ int resultTidx = 0;
+
+ MultiComparator invListCmp = invIndex.getInvListElementCmp();
+
+ resultFrameTupleAcc.reset(prevCurrentBuffer);
+ resultFrameTupleApp.reset(newCurrentBuffer, true);
+
+ while (resultTidx < resultFrameTupleAcc.getTupleCount()) {
+
+ resultTuple.reset(prevCurrentBuffer.array(), resultFrameTupleAcc.getTupleStartOffset(resultTidx));
+ int count = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
+ resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
+
+ if (invListCursor.containsKey(resultTuple, invListCmp)) {
+ count++;
+ newBufIdx = appendTupleToNewResults(resultTuple, count, newBufIdx);
+ } else {
+ if (count + numQueryTokens - invListIx > occurrenceThreshold) {
+ newBufIdx = appendTupleToNewResults(resultTuple, count, newBufIdx);
+ }
+ }
+
+ resultTidx++;
+ if (resultTidx >= resultFrameTupleAcc.getTupleCount()) {
+ prevBufIdx++;
+ if (prevBufIdx <= maxPrevBufIdx) {
+ prevCurrentBuffer = prevResultBuffers.get(prevBufIdx);
+ resultFrameTupleAcc.reset(prevCurrentBuffer);
+ resultTidx = 0;
+ }
+ }
+ }
+
+ return newBufIdx;
+ }
+}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/TOccurrenceSearcherSuffixScanOnly.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/TOccurrenceSearcherSuffixScanOnly.java
new file mode 100644
index 0000000..f8bc1ab
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/impls/TOccurrenceSearcherSuffixScanOnly.java
@@ -0,0 +1,133 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.impls;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.List;
+
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListCursor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizer;
+
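+// Variant that always sort-merge scans the suffix lists instead of choosing between probing
+// and scanning; presumably useful for isolating and comparing the two strategies.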
+public class TOccurrenceSearcherSuffixScanOnly extends TOccurrenceSearcher {
+
+ public TOccurrenceSearcherSuffixScanOnly(IHyracksStageletContext ctx, InvertedIndex invIndex,
+ IBinaryTokenizer queryTokenizer) {
+ super(ctx, invIndex, queryTokenizer);
+ }
+
+ protected int mergeSuffixLists(int numPrefixTokens, int numQueryTokens, int maxPrevBufIdx) throws IOException {
+ for (int i = numPrefixTokens; i < numQueryTokens; i++) {
+ swap = prevResultBuffers;
+ prevResultBuffers = newResultBuffers;
+ newResultBuffers = swap;
+ currentNumResults = 0;
+
+ invListCursors.get(i).pinPagesSync();
+ maxPrevBufIdx = mergeSuffixListScan(invListCursors.get(i), prevResultBuffers, maxPrevBufIdx,
+ newResultBuffers, i, numQueryTokens);
+ invListCursors.get(i).unpinPages();
+ }
+ return maxPrevBufIdx;
+ }
+
+ protected int mergeSuffixListScan(IInvertedListCursor invListCursor, List<ByteBuffer> prevResultBuffers,
+ int maxPrevBufIdx, List<ByteBuffer> newResultBuffers, int invListIx, int numQueryTokens) throws IOException {
+
+ int newBufIdx = 0;
+ ByteBuffer newCurrentBuffer = newResultBuffers.get(0);
+
+ int prevBufIdx = 0;
+ ByteBuffer prevCurrentBuffer = prevResultBuffers.get(0);
+
+ boolean advanceCursor = true;
+ boolean advancePrevResult = false;
+ int resultTidx = 0;
+
+ MultiComparator invListCmp = invIndex.getInvListElementCmp();
+
+ resultFrameTupleAcc.reset(prevCurrentBuffer);
+ resultFrameTupleApp.reset(newCurrentBuffer, true);
+
+ while (invListCursor.hasNext() && resultTidx < resultFrameTupleAcc.getTupleCount()) {
+
+ if (advanceCursor)
+ invListCursor.next();
+
+ ITupleReference invListTuple = invListCursor.getTuple();
+
+ resultTuple.reset(prevCurrentBuffer.array(), resultFrameTupleAcc.getTupleStartOffset(resultTidx));
+
+ int cmp = invListCmp.compare(invListTuple, resultTuple);
+ if (cmp == 0) {
+ int count = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
+ resultTuple.getFieldStart(resultTuple.getFieldCount() - 1)) + 1;
+ newBufIdx = appendTupleToNewResults(resultTuple, count, newBufIdx);
+ advanceCursor = true;
+ advancePrevResult = true;
+ } else {
+ if (cmp < 0) {
+ advanceCursor = true;
+ advancePrevResult = false;
+ } else {
+ int count = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
+ resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
+ if (count + numQueryTokens - invListIx > occurrenceThreshold) {
+ newBufIdx = appendTupleToNewResults(resultTuple, count, newBufIdx);
+ }
+ advanceCursor = false;
+ advancePrevResult = true;
+ }
+ }
+
+ if (advancePrevResult) {
+ resultTidx++;
+ if (resultTidx >= resultFrameTupleAcc.getTupleCount()) {
+ prevBufIdx++;
+ if (prevBufIdx <= maxPrevBufIdx) {
+ prevCurrentBuffer = prevResultBuffers.get(prevBufIdx);
+ resultFrameTupleAcc.reset(prevCurrentBuffer);
+ resultTidx = 0;
+ }
+ }
+ }
+ }
+
+ // append remaining elements from previous result set
+ while (resultTidx < resultFrameTupleAcc.getTupleCount()) {
+
+            // reset to the current remaining tuple before reading its count
+            resultTuple.reset(prevCurrentBuffer.array(), resultFrameTupleAcc.getTupleStartOffset(resultTidx));
+            int count = IntegerSerializerDeserializer.getInt(resultTuple.getFieldData(0),
+                    resultTuple.getFieldStart(resultTuple.getFieldCount() - 1));
+ newBufIdx = appendTupleToNewResults(resultTuple, count, newBufIdx);
+
+ resultTidx++;
+ if (resultTidx >= resultFrameTupleAcc.getTupleCount()) {
+ prevBufIdx++;
+ if (prevBufIdx <= maxPrevBufIdx) {
+ prevCurrentBuffer = prevResultBuffers.get(prevBufIdx);
+ resultFrameTupleAcc.reset(prevCurrentBuffer);
+ resultTidx = 0;
+ }
+ }
+ }
+
+ return newBufIdx;
+ }
+}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchmodifiers/ConjunctiveSearchModifier.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchmodifiers/ConjunctiveSearchModifier.java
new file mode 100644
index 0000000..55945be
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchmodifiers/ConjunctiveSearchModifier.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.searchmodifiers;
+
+import java.util.Collections;
+import java.util.List;
+
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListCursor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifier;
+
+public class ConjunctiveSearchModifier implements IInvertedIndexSearchModifier {
+
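+    // Conjunctive search: every query token must match, so T equals the number of inverted
+    // lists and a single prefix list is merged exhaustively after sorting the cursors.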
+ @Override
+ public int getOccurrenceThreshold(List<IInvertedListCursor> invListCursors) {
+ return invListCursors.size();
+ }
+
+ @Override
+ public int getPrefixLists(List<IInvertedListCursor> invListCursors) {
+ Collections.sort(invListCursors);
+ return 1;
+ }
+
+}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchmodifiers/EditDistanceSearchModifier.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchmodifiers/EditDistanceSearchModifier.java
new file mode 100644
index 0000000..ac109b6
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchmodifiers/EditDistanceSearchModifier.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.searchmodifiers;
+
+import java.util.Collections;
+import java.util.List;
+
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListCursor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifier;
+
+public class EditDistanceSearchModifier implements IInvertedIndexSearchModifier {
+
+ private int gramLength;
+ private int edThresh;
+
+ public EditDistanceSearchModifier(int gramLength, int edThresh) {
+ this.gramLength = gramLength;
+ this.edThresh = edThresh;
+ }
+
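+    // q-gram count filter: strings within edit distance edThresh of the query share at least
+    // |Q| - edThresh * gramLength of its grams, which is used as the occurrence threshold T.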
+ @Override
+ public int getOccurrenceThreshold(List<IInvertedListCursor> invListCursors) {
+ return invListCursors.size() - edThresh * gramLength;
+ }
+
+ @Override
+ public int getPrefixLists(List<IInvertedListCursor> invListCursors) {
+ Collections.sort(invListCursors);
+ return invListCursors.size() - getOccurrenceThreshold(invListCursors) + 1;
+ }
+
+ public int getGramLength() {
+ return gramLength;
+ }
+
+ public void setGramLength(int gramLength) {
+ this.gramLength = gramLength;
+ }
+
+ public int getEdThresh() {
+ return edThresh;
+ }
+
+ public void setEdThresh(int edThresh) {
+ this.edThresh = edThresh;
+ }
+}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchmodifiers/JaccardSearchModifier.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchmodifiers/JaccardSearchModifier.java
new file mode 100644
index 0000000..f42841c
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchmodifiers/JaccardSearchModifier.java
@@ -0,0 +1,53 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.searchmodifiers;
+
+import java.util.Collections;
+import java.util.List;
+
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListCursor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifier;
+
+public class JaccardSearchModifier implements IInvertedIndexSearchModifier {
+
+ private float jaccThresh;
+
+ public JaccardSearchModifier(float jaccThresh) {
+ this.jaccThresh = jaccThresh;
+ }
+
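+    // T is derived from the Jaccard threshold as floor(jaccThresh * number of query token lists).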
+ @Override
+ public int getOccurrenceThreshold(List<IInvertedListCursor> invListCursors) {
+ return (int) Math.floor((float) invListCursors.size() * jaccThresh);
+ }
+
+ @Override
+ public int getPrefixLists(List<IInvertedListCursor> invListCursors) {
+ Collections.sort(invListCursors);
+ if (invListCursors.size() == 0) {
+ return 0;
+ }
+ return invListCursors.size() - getOccurrenceThreshold(invListCursors) + 1;
+ }
+
+ public float getJaccThresh() {
+ return jaccThresh;
+ }
+
+ public void setJaccThresh(float jaccThresh) {
+ this.jaccThresh = jaccThresh;
+ }
+}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/AbstractUTF8StringBinaryTokenizer.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/AbstractUTF8StringBinaryTokenizer.java
new file mode 100644
index 0000000..bbb32d6
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/AbstractUTF8StringBinaryTokenizer.java
@@ -0,0 +1,78 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ *
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+import edu.uci.ics.hyracks.dataflow.common.data.util.StringUtils;
+
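+// Base class for tokenizers over UTF-8 string fields: reset() positions the tokenizer behind an
+// optional type tag and the 2-byte UTF-8 length indicator, and maintains optional per-token
+// start/length arrays when token counting is not ignored.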
+public abstract class AbstractUTF8StringBinaryTokenizer implements
+ IBinaryTokenizer {
+
+ protected byte[] data;
+ protected int start;
+ protected int length;
+ protected int tokenLength;
+ protected int index;
+ protected int utf8Length;
+
+ protected final IntArray tokensStart;
+ protected final IntArray tokensLength;
+ protected final IToken token;
+
+ protected final boolean ignoreTokenCount;
+ protected final boolean sourceHasTypeTag;
+
+ public AbstractUTF8StringBinaryTokenizer(boolean ignoreTokenCount,
+ boolean sourceHasTypeTag, ITokenFactory tokenFactory) {
+ this.ignoreTokenCount = ignoreTokenCount;
+ this.sourceHasTypeTag = sourceHasTypeTag;
+ if (!ignoreTokenCount) {
+ tokensStart = new IntArray();
+ tokensLength = new IntArray();
+ } else {
+ tokensStart = null;
+ tokensLength = null;
+ }
+ token = tokenFactory.createToken();
+ }
+
+ @Override
+ public IToken getToken() {
+ return token;
+ }
+
+ @Override
+ public void reset(byte[] data, int start, int length) {
+ this.start = start;
+ index = this.start;
+ if (sourceHasTypeTag) {
+ index++; // skip type tag
+ }
+ utf8Length = StringUtils.getUTFLen(data, index);
+ index += 2; // skip utf8 length indicator
+ this.data = data;
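+        // note: 'length' is stored as start + length, i.e. an end offset into 'data'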
+ this.length = length + start;
+
+ tokenLength = 0;
+ if (!ignoreTokenCount) {
+ tokensStart.reset();
+ tokensLength.reset();
+ }
+ }
+}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/AbstractUTF8Token.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/AbstractUTF8Token.java
new file mode 100644
index 0000000..92d6ac2
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/AbstractUTF8Token.java
@@ -0,0 +1,106 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ *
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.hyracks.dataflow.common.data.util.StringUtils;
+
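+// Base class for UTF-8 tokens; a serde type tag is written before the serialized token/count
+// only when the tag is positive (the tag-less constructor sets both tags to -1).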
+public abstract class AbstractUTF8Token implements IToken {
+    public static final int GOLDEN_RATIO_32 = 0x9e3779b9;
+
+ protected int length;
+ protected int tokenLength;
+ protected int start;
+ protected int tokenCount;
+ protected byte[] data;
+ protected final byte tokenTypeTag;
+ protected final byte countTypeTag;
+
+ public AbstractUTF8Token() {
+ tokenTypeTag = -1;
+ countTypeTag = -1;
+ }
+
+ public AbstractUTF8Token(byte tokenTypeTag, byte countTypeTag) {
+ this.tokenTypeTag = tokenTypeTag;
+ this.countTypeTag = countTypeTag;
+ }
+
+ @Override
+ public byte[] getData() {
+ return data;
+ }
+
+ @Override
+ public int getLength() {
+ return length;
+ }
+
+ public int getLowerCaseUTF8Len(int size) {
+ int lowerCaseUTF8Len = 0;
+ int pos = start;
+ for (int i = 0; i < size; i++) {
+ char c = StringUtils.toLowerCase(StringUtils.charAt(data, pos));
+ lowerCaseUTF8Len += StringUtils.getModifiedUTF8Len(c);
+ pos += StringUtils.charSize(data, pos);
+ }
+ return lowerCaseUTF8Len;
+ }
+
+ @Override
+ public int getStart() {
+ return start;
+ }
+
+ @Override
+ public int getTokenLength() {
+ return tokenLength;
+ }
+
+ public void handleCountTypeTag(DataOutput dos) throws IOException {
+ if (countTypeTag > 0) {
+ dos.write(countTypeTag);
+ }
+ }
+
+ public void handleTokenTypeTag(DataOutput dos) throws IOException {
+ if (tokenTypeTag > 0) {
+ dos.write(tokenTypeTag);
+ }
+ }
+
+ @Override
+ public void reset(byte[] data, int start, int length, int tokenLength,
+ int tokenCount) {
+ this.data = data;
+ this.start = start;
+ this.length = length;
+ this.tokenLength = tokenLength;
+ this.tokenCount = tokenCount;
+ }
+
+ @Override
+ public void serializeTokenCount(DataOutput dos) throws IOException {
+ handleCountTypeTag(dos);
+ dos.writeInt(tokenCount);
+ }
+}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/AbstractUTF8TokenFactory.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/AbstractUTF8TokenFactory.java
new file mode 100644
index 0000000..3b0b82d
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/AbstractUTF8TokenFactory.java
@@ -0,0 +1,36 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ *
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+public abstract class AbstractUTF8TokenFactory implements ITokenFactory {
+ private static final long serialVersionUID = 1L;
+ protected final byte tokenTypeTag;
+ protected final byte countTypeTag;
+
+ public AbstractUTF8TokenFactory() {
+ tokenTypeTag = -1;
+ countTypeTag = -1;
+ }
+
+ public AbstractUTF8TokenFactory(byte tokenTypeTag, byte countTypeTag) {
+ this.tokenTypeTag = tokenTypeTag;
+ this.countTypeTag = countTypeTag;
+ }
+}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizer.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizer.java
index 425436b..de9ad2c 100644
--- a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizer.java
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizer.java
@@ -1,116 +1,87 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
+ * You may obtain a copy of the License at
+ *
* http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
*/
package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
-import java.io.DataOutput;
-import java.io.IOException;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
import edu.uci.ics.hyracks.dataflow.common.data.util.StringUtils;
-import edu.uci.ics.hyracks.storage.am.invertedindex.api.IBinaryTokenizer;
-public class DelimitedUTF8StringBinaryTokenizer implements IBinaryTokenizer {
+public class DelimitedUTF8StringBinaryTokenizer extends
+ AbstractUTF8StringBinaryTokenizer {
- private static final RecordDescriptor tokenSchema = new RecordDescriptor(
- new ISerializerDeserializer[] { UTF8StringSerializerDeserializer.INSTANCE });
+ public DelimitedUTF8StringBinaryTokenizer(boolean ignoreTokenCount,
+ boolean sourceHasTypeTag, ITokenFactory tokenFactory) {
+ super(ignoreTokenCount, sourceHasTypeTag, tokenFactory);
+ }
- private final char delimiter;
- private final byte typeTag;
- private byte[] data;
- private int start;
- private int length;
+ @Override
+ public boolean hasNext() {
+ // skip delimiters
+ while (index < length && isSeparator(StringUtils.charAt(data, index))) {
+ index += StringUtils.charSize(data, index);
+ }
+ return index < length;
+ }
- private int tokenLength;
- private int tokenStart;
- private int pos;
+ private boolean isSeparator(char c) {
+ return !(Character.isLetterOrDigit(c)
+ || Character.getType(c) == Character.OTHER_LETTER || Character
+ .getType(c) == Character.OTHER_NUMBER);
+ }
- public DelimitedUTF8StringBinaryTokenizer(char delimiter, byte typeTag) {
- this.delimiter = delimiter;
- this.typeTag = typeTag;
- }
+ @Override
+ public void next() {
+ tokenLength = 0;
+ int currentTokenStart = index;
+ while (index < length && !isSeparator(StringUtils.charAt(data, index))) {
+ index += StringUtils.charSize(data, index);
+ tokenLength++;
+ }
+ int tokenCount = 1;
+ if (tokenLength > 0 && !ignoreTokenCount) {
+ // search if we got the same token before
+ for (int i = 0; i < tokensStart.length(); ++i) {
+ if (tokenLength == tokensLength.get(i)) {
+ int tokenStart = tokensStart.get(i);
+ tokenCount++; // assume we found it
+ int offset = 0;
+ int currLength = 0;
+ while (currLength < tokenLength) {
+ // case insensitive comparison
+ if (StringUtils.toLowerCase(StringUtils.charAt(data,
+ currentTokenStart + offset)) != StringUtils
+ .toLowerCase(StringUtils.charAt(data,
+ tokenStart + offset))) {
+ tokenCount--;
+ break;
+ }
+ offset += StringUtils.charSize(data, currentTokenStart
+ + offset);
+ currLength++;
+ }
+ }
+ }
+ // add the new token to the list of seen tokens
+ tokensStart.add(currentTokenStart);
+ tokensLength.add(tokenLength);
+ }
- public DelimitedUTF8StringBinaryTokenizer(char delimiter) {
- this.delimiter = delimiter;
- this.typeTag = -1;
- }
-
- @Override
- public int getTokenLength() {
- return tokenLength;
- }
-
- @Override
- public int getTokenStartOff() {
- return tokenStart;
- }
-
- @Override
- public boolean hasNext() {
- if (pos >= start + length)
- return false;
- else
- return true;
- }
-
- @Override
- public void next() {
- tokenLength = 0;
- tokenStart = pos;
- while (pos < start + length) {
- int len = StringUtils.charSize(data, pos);
- char ch = StringUtils.charAt(data, pos);
- pos += len;
- if (ch == delimiter) {
- break;
- }
- tokenLength += len;
- }
- }
-
- @Override
- public void reset(byte[] data, int start, int length) {
- this.data = data;
- this.start = start;
- this.pos = start;
- this.length = length;
- this.tokenLength = 0;
- this.tokenStart = 0;
- pos += 2; // UTF-8 specific
- }
-
- @Override
- public void writeToken(DataOutput dos) throws IOException {
- if (typeTag > 0)
- dos.write(typeTag);
-
- // WARNING: 2-byte length indicator is specific to UTF-8
- dos.writeShort((short) tokenLength);
- dos.write(data, tokenStart, tokenLength);
- }
-
- @Override
- public RecordDescriptor getTokenSchema() {
- return tokenSchema;
- }
-
- // cannot be implemented for this tokenizer
- @Override
- public int getNumTokens() {
- return -1;
- }
+ // set token
+ token.reset(data, currentTokenStart, index, tokenLength, tokenCount);
+ }
}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizerFactory.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizerFactory.java
index 2e85db5..4a350b3 100644
--- a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizerFactory.java
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/DelimitedUTF8StringBinaryTokenizerFactory.java
@@ -1,41 +1,42 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
+ * You may obtain a copy of the License at
+ *
* http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
*
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
*/
package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
-import edu.uci.ics.hyracks.storage.am.invertedindex.api.IBinaryTokenizer;
-import edu.uci.ics.hyracks.storage.am.invertedindex.api.IBinaryTokenizerFactory;
+public class DelimitedUTF8StringBinaryTokenizerFactory implements
+ IBinaryTokenizerFactory {
-public class DelimitedUTF8StringBinaryTokenizerFactory implements IBinaryTokenizerFactory {
+ private static final long serialVersionUID = 1L;
+ private final boolean ignoreTokenCount;
+ private final boolean sourceHasTypeTag;
+ private final ITokenFactory tokenFactory;
- private static final long serialVersionUID = 1L;
- private final char delimiter;
- private final byte typeTag;
+ public DelimitedUTF8StringBinaryTokenizerFactory(boolean ignoreTokenCount,
+ boolean sourceHasTypeTag, ITokenFactory tokenFactory) {
+ this.ignoreTokenCount = ignoreTokenCount;
+ this.sourceHasTypeTag = sourceHasTypeTag;
+ this.tokenFactory = tokenFactory;
+ }
- public DelimitedUTF8StringBinaryTokenizerFactory(char delimiter, byte typeTag) {
- this.delimiter = delimiter;
- this.typeTag = typeTag;
- }
-
- public DelimitedUTF8StringBinaryTokenizerFactory(char delimiter) {
- this.delimiter = delimiter;
- this.typeTag = -1;
- }
-
- @Override
- public IBinaryTokenizer createBinaryTokenizer() {
- return new DelimitedUTF8StringBinaryTokenizer(delimiter, typeTag);
- }
+ @Override
+ public IBinaryTokenizer createTokenizer() {
+ return new DelimitedUTF8StringBinaryTokenizer(ignoreTokenCount,
+ sourceHasTypeTag, tokenFactory);
+ }
}
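
A minimal, hypothetical driver (not part of this patch) showing how the new factory API fits together; it assumes the UTF8WordTokenFactory introduced later in this patch and an input field without a leading type tag:

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;

    import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.DelimitedUTF8StringBinaryTokenizerFactory;
    import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizer;
    import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.UTF8WordTokenFactory;

    public class WordTokenizerExample {
        public static void main(String[] args) throws Exception {
            // Field layout: 2-byte UTF length indicator + modified UTF-8 payload.
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            new DataOutputStream(baos).writeUTF("Please, tokenize this text; tokenize it please!");
            byte[] field = baos.toByteArray();

            // ignoreTokenCount == false: per-token occurrence counts are computed;
            // sourceHasTypeTag == false: no leading type tag in the field.
            IBinaryTokenizer tokenizer = new DelimitedUTF8StringBinaryTokenizerFactory(
                    false, false, new UTF8WordTokenFactory()).createTokenizer();

            tokenizer.reset(field, 0, field.length);
            while (tokenizer.hasNext()) {
                tokenizer.next();
                ByteArrayOutputStream tokenBytes = new ByteArrayOutputStream();
                // Each word is serialized lower-cased, with its own UTF length indicator.
                tokenizer.getToken().serializeToken(new DataOutputStream(tokenBytes));
                System.out.println("token of " + tokenBytes.size() + " bytes");
            }
        }
    }
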
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedQGramUTF8StringBinaryTokenizer.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedQGramUTF8StringBinaryTokenizer.java
deleted file mode 100644
index 2cc0b6c..0000000
--- a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedQGramUTF8StringBinaryTokenizer.java
+++ /dev/null
@@ -1,150 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
-
-import java.io.DataOutput;
-import java.io.IOException;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.util.StringUtils;
-import edu.uci.ics.hyracks.storage.am.invertedindex.api.IBinaryTokenizer;
-
-public class HashedQGramUTF8StringBinaryTokenizer implements IBinaryTokenizer {
-
- private static final RecordDescriptor tokenSchema = new RecordDescriptor(
- new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE });
-
- private final boolean prePost;
- private final int q;
- private byte[] data;
- private int start;
- private int length;
- private int gramNum;
- private int utflen;
-
- private final char PRECHAR = '#';
- private final char POSTCHAR = '$';
-
- private int charPos;
- private int pos;
- private int hashedGram;
-
- HashedQGramUTF8StringBinaryTokenizer(int q, boolean prePost) {
- this.prePost = prePost;
- this.q = q;
- }
-
- @Override
- public int getTokenLength() {
- // the produced token (hashed q-gram) is derived from data
- // but not contained in it
- // therefore this call does not make sense
- return -1;
- }
-
- @Override
- public int getTokenStartOff() {
- // the produced token (hashed q-gram) is derived from data
- // but not contained in it
- // therefore this call does not make sense
- return -1;
- }
-
- @Override
- public boolean hasNext() {
- if ((prePost && pos >= start + length) || (!prePost && pos >= start + length - q))
- return false;
- else
- return true;
- }
-
- @Override
- public void next() {
- hashedGram = 0;
- if (prePost) {
- if (gramNum < q) {
- for (int i = 0; i < q - gramNum; i++) {
- hashedGram = 31 * hashedGram + PRECHAR;
- }
-
- int tmpPos = pos;
- for (int i = 0; i < gramNum; i++) {
- hashedGram = 31 * hashedGram + StringUtils.charAt(data, tmpPos);
- tmpPos += StringUtils.charSize(data, tmpPos);
- }
- } else {
- int stopStr = Math.min(charPos + q, utflen);
- int tmpPos = pos;
- for (int i = charPos; i < stopStr; i++) {
- hashedGram = 31 * hashedGram + StringUtils.charAt(data, tmpPos);
- tmpPos += StringUtils.charSize(data, tmpPos);
- }
-
- int stopPost = (charPos + q) - (utflen);
- for (int i = 0; i < stopPost; i++) {
- hashedGram = 31 * hashedGram + POSTCHAR;
- }
- pos += StringUtils.charSize(data, pos);
- charPos++;
- }
- gramNum++;
- } else {
- int tmpPos = pos;
- for (int i = charPos; i < charPos + q; i++) {
- hashedGram = 31 * hashedGram + StringUtils.charAt(data, tmpPos);
- tmpPos += StringUtils.charSize(data, tmpPos);
- }
- pos += StringUtils.charSize(data, pos);
- charPos++;
- }
- }
-
- @Override
- public void reset(byte[] data, int start, int length) {
- this.data = data;
- this.start = start;
- this.length = length;
- this.utflen = StringUtils.getUTFLen(data, start);
- this.pos = start + 2; // UTF-8 specific
- this.gramNum = 1;
- this.charPos = 0;
- }
-
- @Override
- public void writeToken(DataOutput dos) throws IOException {
- dos.writeInt(hashedGram);
- }
-
- public char getPreChar() {
- return PRECHAR;
- }
-
- public char getPostChar() {
- return POSTCHAR;
- }
-
- @Override
- public RecordDescriptor getTokenSchema() {
- return tokenSchema;
- }
-
- @Override
- public int getNumTokens() {
- return 0;
- }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedQGramUTF8StringBinaryTokenizerFactory.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedQGramUTF8StringBinaryTokenizerFactory.java
deleted file mode 100644
index a11fe8a..0000000
--- a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedQGramUTF8StringBinaryTokenizerFactory.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
-
-import edu.uci.ics.hyracks.storage.am.invertedindex.api.IBinaryTokenizer;
-import edu.uci.ics.hyracks.storage.am.invertedindex.api.IBinaryTokenizerFactory;
-
-public class HashedQGramUTF8StringBinaryTokenizerFactory implements IBinaryTokenizerFactory {
-
- private static final long serialVersionUID = 1L;
- private final int q;
- private final boolean prePost;
-
- public HashedQGramUTF8StringBinaryTokenizerFactory(int q, boolean prePost) {
- this.q = q;
- this.prePost = prePost;
- }
-
- @Override
- public IBinaryTokenizer createBinaryTokenizer() {
- return new HashedQGramUTF8StringBinaryTokenizer(q, prePost);
- }
-}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8NGramToken.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8NGramToken.java
new file mode 100644
index 0000000..25d1a2c
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8NGramToken.java
@@ -0,0 +1,64 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ *
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.hyracks.dataflow.common.data.util.StringUtils;
+
+public class HashedUTF8NGramToken extends UTF8NGramToken {
+ public HashedUTF8NGramToken(byte tokenTypeTag, byte countTypeTag) {
+ super(tokenTypeTag, countTypeTag);
+ }
+
+ @Override
+ public void serializeToken(DataOutput dos) throws IOException {
+ handleTokenTypeTag(dos);
+
+ int hash = GOLDEN_RATIO_32;
+
+ // pre chars
+ for (int i = 0; i < numPreChars; i++) {
+ hash ^= PRECHAR;
+ hash *= GOLDEN_RATIO_32;
+ }
+
+ // regular chars
+ int numRegGrams = tokenLength - numPreChars - numPostChars;
+ int pos = start;
+ for (int i = 0; i < numRegGrams; i++) {
+ hash ^= StringUtils.toLowerCase(StringUtils.charAt(data, pos));
+ hash *= GOLDEN_RATIO_32;
+ pos += StringUtils.charSize(data, pos);
+ }
+
+ // post chars
+ for (int i = 0; i < numPostChars; i++) {
+ hash ^= POSTCHAR;
+ hash *= GOLDEN_RATIO_32;
+ }
+
+ // token count
+ hash += tokenCount;
+
+ dos.writeInt(hash);
+ }
+}
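
HashedUTF8NGramToken (like the HashedUTF8WordToken added below) folds characters into a multiplicative hash seeded with GOLDEN_RATIO_32 and finally adds the token count, so repeated occurrences of the same gram or word map to distinct keys. A stand-alone sketch of the same fold over a plain String, for illustration only (not part of this patch; the real tokens read their characters out of the modified UTF-8 buffer via StringUtils):

    // Hypothetical illustration of the xor/multiply fold used by the hashed tokens.
    public class GoldenRatioHashSketch {
        private static final int GOLDEN_RATIO_32 = 0x9e3779b9;

        public static int hash(String token, int tokenCount) {
            int hash = GOLDEN_RATIO_32;
            for (int i = 0; i < token.length(); i++) {
                hash ^= Character.toLowerCase(token.charAt(i)); // case-insensitive
                hash *= GOLDEN_RATIO_32;
            }
            return hash + tokenCount; // distinguishes duplicate tokens
        }

        public static void main(String[] args) {
            System.out.println(hash("database", 1));
            System.out.println(hash("Database", 1)); // same value: lower-cased first
            System.out.println(hash("database", 2)); // differs: second occurrence
        }
    }
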
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8NGramTokenFactory.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8NGramTokenFactory.java
new file mode 100644
index 0000000..4a87793
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8NGramTokenFactory.java
@@ -0,0 +1,38 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ *
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+public class HashedUTF8NGramTokenFactory extends AbstractUTF8TokenFactory {
+
+ private static final long serialVersionUID = 1L;
+
+ public HashedUTF8NGramTokenFactory() {
+ super();
+ }
+
+ public HashedUTF8NGramTokenFactory(byte tokenTypeTag, byte countTypeTag) {
+ super(tokenTypeTag, countTypeTag);
+ }
+
+ @Override
+ public IToken createToken() {
+ return new HashedUTF8NGramToken(tokenTypeTag, countTypeTag);
+ }
+}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8WordToken.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8WordToken.java
new file mode 100644
index 0000000..55237ce
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8WordToken.java
@@ -0,0 +1,88 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ *
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.hyracks.dataflow.common.data.util.StringUtils;
+
+public class HashedUTF8WordToken extends UTF8WordToken {
+
+ private int hash = 0;
+
+ public HashedUTF8WordToken(byte tokenTypeTag, byte countTypeTag) {
+ super(tokenTypeTag, countTypeTag);
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == null) {
+ return false;
+ }
+ if (!(o instanceof IToken)) {
+ return false;
+ }
+ IToken t = (IToken) o;
+ if (t.getTokenLength() != tokenLength) {
+ return false;
+ }
+ int offset = 0;
+ for (int i = 0; i < tokenLength; i++) {
+ if (StringUtils.charAt(t.getData(), t.getStart() + offset) != StringUtils
+ .charAt(data, start + offset)) {
+ return false;
+ }
+ offset += StringUtils.charSize(data, start + offset);
+ }
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ return hash;
+ }
+
+ @Override
+ public void reset(byte[] data, int start, int length, int tokenLength,
+ int tokenCount) {
+ super.reset(data, start, length, tokenLength, tokenCount);
+
+ // pre-compute hash value using JAQL-like string hashing
+ int pos = start;
+ hash = GOLDEN_RATIO_32;
+ for (int i = 0; i < tokenLength; i++) {
+ hash ^= StringUtils.toLowerCase(StringUtils.charAt(data, pos));
+ hash *= GOLDEN_RATIO_32;
+ pos += StringUtils.charSize(data, pos);
+ }
+ hash += tokenCount;
+ }
+
+ @Override
+ public void serializeToken(DataOutput dos) throws IOException {
+ if (tokenTypeTag > 0) {
+ dos.write(tokenTypeTag);
+ }
+
+ // serialize hash value
+ dos.writeInt(hash);
+ }
+}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8WordTokenFactory.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8WordTokenFactory.java
new file mode 100644
index 0000000..318f041
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/HashedUTF8WordTokenFactory.java
@@ -0,0 +1,38 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ *
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+public class HashedUTF8WordTokenFactory extends AbstractUTF8TokenFactory {
+
+ private static final long serialVersionUID = 1L;
+
+ public HashedUTF8WordTokenFactory() {
+ super();
+ }
+
+ public HashedUTF8WordTokenFactory(byte tokenTypeTag, byte countTypeTag) {
+ super(tokenTypeTag, countTypeTag);
+ }
+
+ @Override
+ public IToken createToken() {
+ return new HashedUTF8WordToken(tokenTypeTag, countTypeTag);
+ }
+}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/IBinaryTokenizer.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/IBinaryTokenizer.java
new file mode 100644
index 0000000..05c6d0b
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/IBinaryTokenizer.java
@@ -0,0 +1,30 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ *
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+public interface IBinaryTokenizer {
+ public IToken getToken();
+
+ public boolean hasNext();
+
+ public void next();
+
+ public void reset(byte[] data, int start, int length);
+}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/IBinaryTokenizerFactory.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/IBinaryTokenizerFactory.java
new file mode 100644
index 0000000..bfe78ee
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/IBinaryTokenizerFactory.java
@@ -0,0 +1,26 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ *
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+import java.io.Serializable;
+
+public interface IBinaryTokenizerFactory extends Serializable {
+ public IBinaryTokenizer createTokenizer();
+}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/INGramToken.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/INGramToken.java
new file mode 100644
index 0000000..befc6d2
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/INGramToken.java
@@ -0,0 +1,28 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ *
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+public interface INGramToken {
+ public int getNumPostChars();
+
+ public int getNumPreChars();
+
+ public void setNumPrePostChars(int numPreChars, int numPostChars);
+}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/IToken.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/IToken.java
new file mode 100644
index 0000000..c1840d7
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/IToken.java
@@ -0,0 +1,40 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ *
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+public interface IToken {
+ public byte[] getData();
+
+ public int getLength();
+
+ public int getStart();
+
+ public int getTokenLength();
+
+ public void reset(byte[] data, int start, int length, int tokenLength,
+ int tokenCount);
+
+ public void serializeToken(DataOutput dos) throws IOException;
+
+ public void serializeTokenCount(DataOutput dos) throws IOException;
+}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/ITokenFactory.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/ITokenFactory.java
new file mode 100644
index 0000000..8b5d71d
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/ITokenFactory.java
@@ -0,0 +1,26 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ *
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+import java.io.Serializable;
+
+public interface ITokenFactory extends Serializable {
+ public IToken createToken();
+}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/IntArray.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/IntArray.java
new file mode 100644
index 0000000..2eb9ff4
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/IntArray.java
@@ -0,0 +1,80 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ *
+ * Author: Rares Vernica <rares (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+import java.util.Arrays;
+
+public class IntArray {
+ private static final int SIZE = 128;
+
+ private int[] data;
+ private int length;
+
+ public IntArray() {
+ data = new int[SIZE];
+ length = 0;
+ }
+
+ public void add(int d) {
+ if (length == data.length) {
+ data = Arrays.copyOf(data, data.length << 1);
+ }
+ data[length++] = d;
+ }
+
+ public int[] get() {
+ return data;
+ }
+
+ public int get(int i) {
+ return data[i];
+ }
+
+ public int length() {
+ return length;
+ }
+
+ public void reset() {
+ length = 0;
+ }
+
+ public void sort() {
+ sort(0, length);
+ }
+
+ public void sort(int start, int end) {
+ Arrays.sort(data, start, end);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder out = new StringBuilder();
+ out.append('[');
+ for (int i = 0; i < length; ++i) {
+ out.append(data[i]);
+ if (i < length - 1) {
+ out.append(',');
+ out.append(' ');
+ }
+ }
+ out.append(']');
+ return out.toString();
+ }
+}
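
A quick, hypothetical usage sketch (not part of this patch) for the growable IntArray above; it doubles its backing array on demand and is reused across tokenizer resets via reset():

    import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IntArray;

    public class IntArrayExample {
        public static void main(String[] args) {
            IntArray starts = new IntArray();
            starts.add(42);
            starts.add(7);
            starts.add(19);
            starts.sort();                       // sorts only the used prefix
            System.out.println(starts);          // [7, 19, 42]
            System.out.println(starts.get(0));   // 7
            System.out.println(starts.length()); // 3
            starts.reset();                      // logical clear; capacity is kept
        }
    }
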
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/NGramUTF8StringBinaryTokenizer.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/NGramUTF8StringBinaryTokenizer.java
new file mode 100644
index 0000000..2a13f83
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/NGramUTF8StringBinaryTokenizer.java
@@ -0,0 +1,123 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ *
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+import edu.uci.ics.hyracks.dataflow.common.data.util.StringUtils;
+
+public class NGramUTF8StringBinaryTokenizer extends
+ AbstractUTF8StringBinaryTokenizer {
+
+ private int gramLength;
+ private boolean usePrePost;
+
+ private int gramNum;
+ private int totalGrams;
+
+ private final INGramToken concreteToken;
+
+ public NGramUTF8StringBinaryTokenizer(int gramLength, boolean usePrePost,
+ boolean ignoreTokenCount, boolean sourceHasTypeTag,
+ ITokenFactory tokenFactory) {
+ super(ignoreTokenCount, sourceHasTypeTag, tokenFactory);
+ this.gramLength = gramLength;
+ this.usePrePost = usePrePost;
+ concreteToken = (INGramToken) token;
+ }
+
+ @Override
+ public boolean hasNext() {
+ if (gramNum < totalGrams) {
+ return true;
+ } else {
+ return false;
+ }
+ }
+
+ @Override
+ public void next() {
+ int currentTokenStart = index;
+ int tokenCount = 1;
+ int numPreChars = 0;
+ int numPostChars = 0;
+ if (usePrePost) {
+ numPreChars = Math.max(gramLength - gramNum - 1, 0);
+ numPostChars = (gramNum > totalGrams - gramLength) ? gramLength
+ - totalGrams + gramNum : 0;
+ }
+ gramNum++;
+
+ concreteToken.setNumPrePostChars(numPreChars, numPostChars);
+ if (numPreChars == 0) {
+ index += StringUtils.charSize(data, index);
+ }
+
+ // compute token count
+ // ignore pre and post grams for duplicate detection
+ if (!ignoreTokenCount && numPreChars == 0 && numPostChars == 0) {
+ int tmpIndex = start;
+ while (tmpIndex < currentTokenStart) {
+ tokenCount++; // assume found
+ int offset = 0;
+ for (int j = 0; j < gramLength; j++) {
+ if (StringUtils.toLowerCase(StringUtils.charAt(data,
+ currentTokenStart + offset)) != StringUtils
+ .toLowerCase(StringUtils.charAt(data, tmpIndex
+ + offset))) {
+ tokenCount--;
+ break;
+ }
+ offset += StringUtils.charSize(data, tmpIndex + offset);
+ }
+ tmpIndex += StringUtils.charSize(data, tmpIndex);
+ }
+ }
+
+ // set token
+ token.reset(data, currentTokenStart, length, gramLength, tokenCount);
+ }
+
+ @Override
+ public void reset(byte[] data, int start, int length) {
+ super.reset(data, start, length);
+ gramNum = 0;
+
+ int numChars = 0;
+ int pos = index;
+ int end = pos + utf8Length;
+ while (pos < end) {
+ numChars++;
+ pos += StringUtils.charSize(data, pos);
+ }
+
+ if (usePrePost) {
+ totalGrams = numChars + gramLength - 1;
+ } else {
+ totalGrams = numChars - gramLength + 1;
+ }
+ }
+
+ public void setGramlength(int gramLength) {
+ this.gramLength = gramLength;
+ }
+
+ public void setPrePost(boolean usePrePost) {
+ this.usePrePost = usePrePost;
+ }
+}
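
A hypothetical driver (not part of this patch) illustrating the gram-count arithmetic in reset(): with pre/post padding, a string of numChars characters yields numChars + gramLength - 1 grams; without padding it yields numChars - gramLength + 1. It assumes the UTF8NGramTokenFactory introduced later in this patch:

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;

    import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.NGramUTF8StringBinaryTokenizer;
    import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.UTF8NGramTokenFactory;

    public class NGramTokenizerExample {
        public static void main(String[] args) throws Exception {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            new DataOutputStream(baos).writeUTF("database"); // 8 characters
            byte[] field = baos.toByteArray();

            // gramLength = 3, usePrePost = true: grams are padded with '#'/'$',
            // so totalGrams = 8 + 3 - 1 = 10 ("##d", "#da", ..., "se$", "e$$");
            // with usePrePost = false it would be 8 - 3 + 1 = 6.
            NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(
                    3, true, true, false, new UTF8NGramTokenFactory());

            tokenizer.reset(field, 0, field.length);
            int grams = 0;
            while (tokenizer.hasNext()) {
                tokenizer.next();
                grams++;
            }
            System.out.println(grams + " grams"); // prints "10 grams"
        }
    }
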
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8NGramToken.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8NGramToken.java
new file mode 100644
index 0000000..6b6406f
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8NGramToken.java
@@ -0,0 +1,86 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ *
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.hyracks.dataflow.common.data.util.StringUtils;
+
+public class UTF8NGramToken extends AbstractUTF8Token implements INGramToken {
+
+ public final static char PRECHAR = '#';
+
+ public final static char POSTCHAR = '$';
+
+ protected int numPreChars;
+ protected int numPostChars;
+
+ public UTF8NGramToken(byte tokenTypeTag, byte countTypeTag) {
+ super(tokenTypeTag, countTypeTag);
+ }
+
+ @Override
+ public int getNumPostChars() {
+ return numPostChars;
+ }
+
+ @Override
+ public int getNumPreChars() {
+ return numPreChars;
+ }
+
+ @Override
+ public void serializeToken(DataOutput dos) throws IOException {
+ handleTokenTypeTag(dos);
+
+ // regular chars
+ int numRegChars = tokenLength - numPreChars - numPostChars;
+
+ // assuming pre and post char need 1-byte each in utf8
+ int tokenUTF8Len = getLowerCaseUTF8Len(numRegChars) + numPreChars
+ + numPostChars;
+
+ // write utf8 length indicator
+ StringUtils.writeUTF8Len(tokenUTF8Len, dos);
+
+ // pre chars
+ for (int i = 0; i < numPreChars; i++) {
+ StringUtils.writeCharAsModifiedUTF8(PRECHAR, dos);
+ }
+
+ int pos = start;
+ for (int i = 0; i < numRegChars; i++) {
+ char c = StringUtils.toLowerCase(StringUtils.charAt(data, pos));
+ StringUtils.writeCharAsModifiedUTF8(c, dos);
+ pos += StringUtils.charSize(data, pos);
+ }
+
+ // post chars
+ for (int i = 0; i < numPostChars; i++) {
+ StringUtils.writeCharAsModifiedUTF8(POSTCHAR, dos);
+ }
+ }
+
+ public void setNumPrePostChars(int numPreChars, int numPostChars) {
+ this.numPreChars = numPreChars;
+ this.numPostChars = numPostChars;
+ }
+}
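
To make the serialized shape of an n-gram token concrete, a small hypothetical round-trip (not part of this patch). It assumes StringUtils.writeUTF8Len emits the same 2-byte length prefix that DataInput.readUTF expects, mirroring the 2 bytes that the tokenizer's reset() skips after getUTFLen:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;

    import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.UTF8NGramToken;

    public class NGramTokenRoundTrip {
        public static void main(String[] args) throws Exception {
            // Field layout: 2-byte UTF length indicator + modified UTF-8 payload.
            ByteArrayOutputStream fieldBytes = new ByteArrayOutputStream();
            new DataOutputStream(fieldBytes).writeUTF("DataBase");
            byte[] field = fieldBytes.toByteArray();

            // A 3-gram anchored at the first character, padded with two pre chars.
            UTF8NGramToken token = new UTF8NGramToken((byte) -1, (byte) -1);
            token.reset(field, 2, field.length, 3, 1); // start = 2 skips the length prefix
            token.setNumPrePostChars(2, 0);

            ByteArrayOutputStream tokenBytes = new ByteArrayOutputStream();
            token.serializeToken(new DataOutputStream(tokenBytes));

            // Assumption: the serialized token is readable with readUTF; expected "##d".
            DataInputStream in = new DataInputStream(
                    new ByteArrayInputStream(tokenBytes.toByteArray()));
            System.out.println(in.readUTF());
        }
    }
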
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8NGramTokenFactory.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8NGramTokenFactory.java
new file mode 100644
index 0000000..968d8e1
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8NGramTokenFactory.java
@@ -0,0 +1,39 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ *
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+public class UTF8NGramTokenFactory extends AbstractUTF8TokenFactory {
+
+ private static final long serialVersionUID = 1L;
+
+ public UTF8NGramTokenFactory() {
+ super();
+ }
+
+ public UTF8NGramTokenFactory(byte tokenTypeTag, byte countTypeTag) {
+ super(tokenTypeTag, countTypeTag);
+ }
+
+ @Override
+ public IToken createToken() {
+ return new UTF8NGramToken(tokenTypeTag, countTypeTag);
+ }
+
+}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8WordToken.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8WordToken.java
new file mode 100644
index 0000000..25e0cd3
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8WordToken.java
@@ -0,0 +1,46 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ *
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.hyracks.dataflow.common.data.util.StringUtils;
+
+public class UTF8WordToken extends AbstractUTF8Token {
+
+ public UTF8WordToken(byte tokenTypeTag, byte countTypeTag) {
+ super(tokenTypeTag, countTypeTag);
+ }
+
+ @Override
+ public void serializeToken(DataOutput dos) throws IOException {
+ handleTokenTypeTag(dos);
+
+ int tokenUTF8Len = getLowerCaseUTF8Len(tokenLength);
+ StringUtils.writeUTF8Len(tokenUTF8Len, dos);
+ int pos = start;
+ for (int i = 0; i < tokenLength; i++) {
+ char c = StringUtils.toLowerCase(StringUtils.charAt(data, pos));
+ StringUtils.writeCharAsModifiedUTF8(c, dos);
+ pos += StringUtils.charSize(data, pos);
+ }
+ }
+}
diff --git a/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8WordTokenFactory.java b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8WordTokenFactory.java
new file mode 100644
index 0000000..4358254
--- /dev/null
+++ b/hyracks-storage-am-invertedindex/src/main/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/UTF8WordTokenFactory.java
@@ -0,0 +1,39 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ *
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+public class UTF8WordTokenFactory extends AbstractUTF8TokenFactory {
+
+ private static final long serialVersionUID = 1L;
+
+ public UTF8WordTokenFactory() {
+ super();
+ }
+
+ public UTF8WordTokenFactory(byte tokenTypeTag, byte countTypeTag) {
+ super(tokenTypeTag, countTypeTag);
+ }
+
+ @Override
+ public IToken createToken() {
+ return new UTF8WordToken(tokenTypeTag, countTypeTag);
+ }
+
+}
diff --git a/hyracks-storage-am-invertedindex/.classpath b/hyracks-storage-am-rtree/.classpath
similarity index 100%
copy from hyracks-storage-am-invertedindex/.classpath
copy to hyracks-storage-am-rtree/.classpath
diff --git a/hyracks-storage-am-invertedindex/.project b/hyracks-storage-am-rtree/.project
similarity index 91%
rename from hyracks-storage-am-invertedindex/.project
rename to hyracks-storage-am-rtree/.project
index 9303fb7..4989318 100644
--- a/hyracks-storage-am-invertedindex/.project
+++ b/hyracks-storage-am-rtree/.project
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
- <name>hyracks-storage-am-invertedindex</name>
+ <name>hyracks-storage-am-rtree</name>
<comment></comment>
<projects>
</projects>
diff --git a/hyracks-storage-am-rtree/.settings/org.eclipse.jdt.core.prefs b/hyracks-storage-am-rtree/.settings/org.eclipse.jdt.core.prefs
new file mode 100644
index 0000000..375e12e
--- /dev/null
+++ b/hyracks-storage-am-rtree/.settings/org.eclipse.jdt.core.prefs
@@ -0,0 +1,6 @@
+#Fri May 20 19:34:07 PDT 2011
+eclipse.preferences.version=1
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
+org.eclipse.jdt.core.compiler.compliance=1.6
+org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
+org.eclipse.jdt.core.compiler.source=1.6
diff --git a/hyracks-storage-am-rtree/.settings/org.maven.ide.eclipse.prefs b/hyracks-storage-am-rtree/.settings/org.maven.ide.eclipse.prefs
new file mode 100644
index 0000000..ba4f536
--- /dev/null
+++ b/hyracks-storage-am-rtree/.settings/org.maven.ide.eclipse.prefs
@@ -0,0 +1,9 @@
+#Wed Feb 02 19:49:19 PST 2011
+activeProfiles=
+eclipse.preferences.version=1
+fullBuildGoals=process-test-resources
+includeModules=false
+resolveWorkspaceProjects=true
+resourceFilterGoals=process-resources resources\:testResources
+skipCompilerPlugin=true
+version=1
diff --git a/hyracks-storage-am-rtree/pom.xml b/hyracks-storage-am-rtree/pom.xml
new file mode 100644
index 0000000..86328f7
--- /dev/null
+++ b/hyracks-storage-am-rtree/pom.xml
@@ -0,0 +1,63 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-storage-am-rtree</artifactId>
+ <version>0.1.7-SNAPSHOT</version>
+
+ <parent>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks</artifactId>
+ <version>0.1.7-SNAPSHOT</version>
+ </parent>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <version>2.0.2</version>
+ <configuration>
+ <source>1.6</source>
+ <target>1.6</target>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+ <dependencies>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-storage-am-common</artifactId>
+ <version>0.1.7-SNAPSHOT</version>
+ <type>jar</type>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-dataflow-common</artifactId>
+ <version>0.1.7-SNAPSHOT</version>
+ <type>jar</type>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-dataflow-std</artifactId>
+ <version>0.1.7-SNAPSHOT</version>
+ <type>jar</type>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-control-nc</artifactId>
+ <version>0.1.7-SNAPSHOT</version>
+ <type>jar</type>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>4.8.1</version>
+ <type>jar</type>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+</project>
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeFrame.java
new file mode 100644
index 0000000..ebc17b5
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeFrame.java
@@ -0,0 +1,48 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.api;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.Rectangle;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.TupleEntryArrayList;
+
+public interface IRTreeFrame extends ITreeIndexFrame {
+
+ public ITreeIndexTupleReference createTupleReference();
+
+ public void generateDist(ITupleReference tuple, TupleEntryArrayList entries, Rectangle rec, int start, int end);
+
+ public void computeMBR(ISplitKey splitKey, MultiComparator cmp);
+
+ public void insert(ITupleReference tuple, MultiComparator cmp, int tupleIndex) throws Exception;
+
+ public void delete(int tupleIndex, MultiComparator cmp) throws Exception;
+
+ public int getPageNsn();
+
+ public void setPageNsn(int pageNsn);
+
+ public int getRightPage();
+
+ public void setRightPage(int rightPage);
+
+ public void adjustMBR(ITreeIndexTupleReference[] tuples, MultiComparator cmp);
+
+}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java
new file mode 100644
index 0000000..824c6b0
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.api;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.PathList;
+
+public interface IRTreeInteriorFrame extends IRTreeFrame {
+
+ public boolean findBestChild(ITupleReference tuple, MultiComparator cmp);
+
+ public int getBestChildPageId(MultiComparator cmp);
+
+ public int getChildPageIdIfIntersect(ITupleReference tuple, int tupleIndex, MultiComparator cmp);
+
+ public int findTupleByPointer(ITupleReference tuple, MultiComparator cmp);
+
+ public int findTupleByPointer(ITupleReference tuple, PathList traverseList, int parentId, MultiComparator cmp);
+
+ public void adjustKey(ITupleReference tuple, int tupleIndex, MultiComparator cmp);
+
+ public boolean recomputeMBR(ITupleReference tuple, int tupleIndex, MultiComparator cmp);
+
+ public void enlarge(ITupleReference tuple, MultiComparator cmp);
+}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeTupleReference.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeLeafFrame.java
similarity index 63%
copy from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeTupleReference.java
copy to hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeLeafFrame.java
index 38d6217..c85712d 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/api/IBTreeTupleReference.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeLeafFrame.java
@@ -13,18 +13,14 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.api;
-
-import java.nio.ByteBuffer;
+package edu.uci.ics.hyracks.storage.am.rtree.api;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-public interface IBTreeTupleReference extends ITupleReference {
- public void setFieldCount(int fieldCount);
+public interface IRTreeLeafFrame extends IRTreeFrame {
- public void setFieldCount(int fieldStartIndex, int fieldCount);
+ public int findTupleIndex(ITupleReference tuple, MultiComparator cmp);
- public void resetByOffset(ByteBuffer buf, int tupleStartOffset);
-
- public void resetByTupleIndex(IBTreeFrame frame, int tupleIndex);
+ public boolean intersect(ITupleReference tuple, int tupleIndex, MultiComparator cmp);
}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeOpHelper.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeOpHelper.java
new file mode 100644
index 0000000..5b8931d
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeOpHelper.java
@@ -0,0 +1,50 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOperatorDescriptorHelper;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexHelperOpenMode;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexOpHelper;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+
+public class RTreeOpHelper extends TreeIndexOpHelper {
+
+ protected MultiComparator interiorCmp;
+
+ public RTreeOpHelper(ITreeIndexOperatorDescriptorHelper opDesc, IHyracksStageletContext ctx, int partition,
+ IndexHelperOpenMode mode) {
+ super(opDesc, ctx, partition, mode);
+ }
+
+ public ITreeIndex createTreeIndex() throws HyracksDataException {
+ IBufferCache bufferCache = opDesc.getStorageManager().getBufferCache(ctx);
+ ITreeIndexMetaDataFrameFactory metaDataFrameFactory = new LIFOMetaDataFrameFactory();
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, indexFileId, 0,
+ metaDataFrameFactory);
+
+ return new RTree(bufferCache, freePageManager, opDesc.getTreeIndexInteriorFactory(),
+ opDesc.getTreeIndexLeafFactory(), cmp);
+ }
+}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeOpHelperFactory.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeOpHelperFactory.java
new file mode 100644
index 0000000..b26f21a
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeOpHelperFactory.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOpHelperFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOperatorDescriptorHelper;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexHelperOpenMode;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexOpHelper;
+
+public class RTreeOpHelperFactory implements ITreeIndexOpHelperFactory {
+
+ private static final long serialVersionUID = 1L;
+
+ @Override
+ public TreeIndexOpHelper createTreeIndexOpHelper(ITreeIndexOperatorDescriptorHelper opDesc,
+ IHyracksStageletContext ctx, int partition, IndexHelperOpenMode mode) {
+ return new RTreeOpHelper(opDesc, ctx, partition, mode);
+ }
+}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorDescriptor.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorDescriptor.java
new file mode 100644
index 0000000..8aab763
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorDescriptor.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.dataflow;
+
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.IOperatorEnvironment;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.std.file.IFileSplitProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOpHelperFactory;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+
+public class RTreeSearchOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
+
+ private static final long serialVersionUID = 1L;
+
+ private int[] keyFields; // fields in input tuple to be used as keys
+
+ public RTreeSearchOperatorDescriptor(JobSpecification spec, RecordDescriptor recDesc,
+ IStorageManagerInterface storageManager, IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider,
+ IFileSplitProvider fileSplitProvider, ITreeIndexFrameFactory interiorFrameFactory,
+ ITreeIndexFrameFactory leafFrameFactory, ITypeTrait[] typeTraits,
+ IBinaryComparatorFactory[] comparatorFactories, int[] keyFields, ITreeIndexOpHelperFactory opHelperFactory) {
+ super(spec, 1, 1, recDesc, storageManager, treeIndexRegistryProvider, fileSplitProvider, interiorFrameFactory,
+ leafFrameFactory, typeTraits, comparatorFactories, opHelperFactory);
+ this.keyFields = keyFields;
+ }
+
+ @Override
+ public IOperatorNodePushable createPushRuntime(final IHyracksStageletContext ctx, final IOperatorEnvironment env,
+ IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
+ return new RTreeSearchOperatorNodePushable(this, ctx, partition, recordDescProvider, keyFields);
+ }
+}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorNodePushable.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorNodePushable.java
new file mode 100644
index 0000000..6a13d9f
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorNodePushable.java
@@ -0,0 +1,182 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.dataflow;
+
+import java.io.DataOutput;
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.AbstractTreeIndexOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexHelperOpenMode;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.PermutingFrameTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexOpHelper;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeOpContext;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.SearchPredicate;
+
+public class RTreeSearchOperatorNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
+ private TreeIndexOpHelper treeIndexOpHelper;
+ private FrameTupleAccessor accessor;
+
+ private ByteBuffer writeBuffer;
+ private FrameTupleAppender appender;
+ private ArrayTupleBuilder tb;
+ private DataOutput dos;
+
+ private RTree rtree;
+ private PermutingFrameTupleReference searchKey;
+ private SearchPredicate searchPred;
+ private MultiComparator cmp;
+ private ITreeIndexCursor cursor;
+ private ITreeIndexFrame interiorFrame;
+ private ITreeIndexFrame leafFrame;
+ private RTreeOpContext opCtx;
+
+ private RecordDescriptor recDesc;
+
+ public RTreeSearchOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc, IHyracksStageletContext ctx,
+ int partition, IRecordDescriptorProvider recordDescProvider, int[] keyFields) {
+ treeIndexOpHelper = opDesc.getTreeIndexOpHelperFactory().createTreeIndexOpHelper(opDesc, ctx, partition,
+ IndexHelperOpenMode.OPEN);
+
+ this.recDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getOperatorId(), 0);
+ if (keyFields != null && keyFields.length > 0) {
+ searchKey = new PermutingFrameTupleReference();
+ searchKey.setFieldPermutation(keyFields);
+ }
+ }
+
+ @Override
+ public void open() throws HyracksDataException {
+ AbstractTreeIndexOperatorDescriptor opDesc = (AbstractTreeIndexOperatorDescriptor) treeIndexOpHelper
+ .getOperatorDescriptor();
+ accessor = new FrameTupleAccessor(treeIndexOpHelper.getHyracksStageletContext().getFrameSize(), recDesc);
+
+ interiorFrame = opDesc.getTreeIndexInteriorFactory().createFrame();
+ leafFrame = opDesc.getTreeIndexLeafFactory().createFrame();
+ cursor = new RTreeSearchCursor((IRTreeInteriorFrame) interiorFrame, (IRTreeLeafFrame) leafFrame);
+
+ try {
+
+ treeIndexOpHelper.init();
+ rtree = (RTree) treeIndexOpHelper.getTreeIndex();
+
+ int keySearchFields = rtree.getCmp().getComparators().length;
+
+ IBinaryComparator[] keySearchComparators = new IBinaryComparator[keySearchFields];
+ for (int i = 0; i < keySearchFields; i++) {
+ keySearchComparators[i] = rtree.getCmp().getComparators()[i];
+ }
+ cmp = new MultiComparator(rtree.getCmp().getTypeTraits(), keySearchComparators);
+
+ searchPred = new SearchPredicate(searchKey, cmp);
+
+ writeBuffer = treeIndexOpHelper.getHyracksStageletContext().allocateFrame();
+ tb = new ArrayTupleBuilder(rtree.getCmp().getFieldCount());
+ dos = tb.getDataOutput();
+ appender = new FrameTupleAppender(treeIndexOpHelper.getHyracksStageletContext().getFrameSize());
+ appender.reset(writeBuffer, true);
+
+ opCtx = rtree.createOpContext(IndexOp.SEARCH, treeIndexOpHelper.getLeafFrame(),
+ treeIndexOpHelper.getInteriorFrame(), null);
+
+ } catch (Exception e) {
+ // release the op helper's resources and propagate the failure instead of swallowing it
+ treeIndexOpHelper.deinit();
+ throw new HyracksDataException(e);
+ }
+ }
+
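+ // Copies every tuple produced by the cursor into the output frame, flushing the frame to the
+ // downstream writer whenever it fills up.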
+ private void writeSearchResults() throws Exception {
+ while (cursor.hasNext()) {
+ tb.reset();
+ cursor.next();
+
+ ITupleReference frameTuple = cursor.getTuple();
+ for (int i = 0; i < frameTuple.getFieldCount(); i++) {
+ dos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
+ tb.addFieldEndOffset();
+ }
+
+ if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+ FrameUtils.flushFrame(writeBuffer, writer);
+ appender.reset(writeBuffer, true);
+ if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+ throw new IllegalStateException();
+ }
+ }
+ }
+ }
+
+ @Override
+ public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
+ accessor.reset(buffer);
+
+ int tupleCount = accessor.getTupleCount();
+ try {
+ for (int i = 0; i < tupleCount; i++) {
+ searchKey.reset(accessor, i);
+
+ searchPred.setSearchKey(searchKey);
+ cursor.reset();
+ rtree.search(cursor, searchPred, opCtx);
+ writeSearchResults();
+ }
+ } catch (Exception e) {
+ throw new HyracksDataException(e);
+ }
+ }
+
+ @Override
+ public void close() throws HyracksDataException {
+ try {
+ if (appender.getTupleCount() > 0) {
+ FrameUtils.flushFrame(writeBuffer, writer);
+ }
+ writer.close();
+ try {
+ cursor.close();
+ } catch (Exception e) {
+ throw new HyracksDataException(e);
+ }
+ } finally {
+ treeIndexOpHelper.deinit();
+ }
+ }
+
+ @Override
+ public void flush() throws HyracksDataException {
+ if (appender.getTupleCount() > 0) {
+ FrameUtils.flushFrame(writeBuffer, writer);
+ }
+ }
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java
new file mode 100644
index 0000000..3623f04
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java
@@ -0,0 +1,206 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.frames;
+
+import java.util.ArrayList;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.frames.TreeIndexNSMFrame;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeFrame;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeSplitKey;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.Rectangle;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.TupleEntryArrayList;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.UnorderedSlotManager;
+import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriter;
+
+public abstract class RTreeNSMFrame extends TreeIndexNSMFrame implements IRTreeFrame {
+ protected static final int pageNsnOff = smFlagOff + 1;
+ protected static final int rightPageOff = pageNsnOff + 4;
+
+ protected ITreeIndexTupleReference[] tuples;
+ protected ITreeIndexTupleReference cmpFrameTuple;
+ protected TupleEntryArrayList tupleEntries1; // used for split and checking enlargement
+ protected TupleEntryArrayList tupleEntries2; // used for split
+
+ protected Rectangle[] rec;
+
+ protected static final double splitFactor = 0.4;
+ protected static final int nearMinimumOverlapFactor = 32;
+ private static final double doubleEpsilon = computeDoubleEpsilon();
+ private static final int numTuplesEntries = 100;
+
+ public RTreeNSMFrame(ITreeIndexTupleWriter tupleWriter, int keyFieldCount) {
+ super(tupleWriter, new UnorderedSlotManager());
+ this.tuples = new ITreeIndexTupleReference[keyFieldCount];
+ for (int i = 0; i < keyFieldCount; i++) {
+ this.tuples[i] = tupleWriter.createTupleReference();
+ }
+ cmpFrameTuple = tupleWriter.createTupleReference();
+
+ tupleEntries1 = new TupleEntryArrayList(numTuplesEntries, numTuplesEntries);
+ tupleEntries2 = new TupleEntryArrayList(numTuplesEntries, numTuplesEntries);
+ rec = new Rectangle[4];
+ for (int i = 0; i < 4; i++) {
+ rec[i] = new Rectangle(keyFieldCount / 2);
+ }
+ }
+
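+ // Computes the machine epsilon for doubles by halving a candidate until adding half of it to 1.0
+ // no longer changes the result; used as a tolerance for floating-point comparisons.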
+ private static double computeDoubleEpsilon() {
+ double doubleEpsilon = 1.0;
+
+ do {
+ doubleEpsilon /= 2.0;
+ } while (1.0 + (doubleEpsilon / 2.0) != 1.0);
+ return doubleEpsilon;
+ }
+
+ public static double doubleEpsilon() {
+ return doubleEpsilon;
+ }
+
+ @Override
+ public void initBuffer(byte level) {
+ super.initBuffer(level);
+ buf.putInt(pageNsnOff, 0);
+ buf.putInt(rightPageOff, -1);
+ }
+
+ public void setTupleCount(int tupleCount) {
+ buf.putInt(tupleCountOff, tupleCount);
+ }
+
+ @Override
+ public void setPageNsn(int pageNsn) {
+ buf.putInt(pageNsnOff, pageNsn);
+ }
+
+ @Override
+ public int getPageNsn() {
+ return buf.getInt(pageNsnOff);
+ }
+
+ @Override
+ protected void resetSpaceParams() {
+ buf.putInt(freeSpaceOff, rightPageOff + 4);
+ buf.putInt(totalFreeSpaceOff, buf.capacity() - (rightPageOff + 4));
+ }
+
+ @Override
+ public int getRightPage() {
+ return buf.getInt(rightPageOff);
+ }
+
+ @Override
+ public void setRightPage(int rightPage) {
+ buf.putInt(rightPageOff, rightPage);
+ }
+
+ protected ITreeIndexTupleReference[] getTuples() {
+ return tuples;
+ }
+
+ // for debugging
+ public ArrayList<Integer> getChildren(MultiComparator cmp) {
+ ArrayList<Integer> ret = new ArrayList<Integer>();
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
+ int tupleCount = buf.getInt(tupleCountOff);
+ for (int i = 0; i < tupleCount; i++) {
+ int tupleOff = slotManager.getTupleOff(slotManager.getSlotOff(i));
+ frameTuple.resetByTupleOffset(buf, tupleOff);
+
+ int intVal = IntegerSerializerDeserializer.getInt(
+ buf.array(),
+ frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
+ + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1));
+ ret.add(intVal);
+ }
+ return ret;
+ }
+
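+ // Builds the MBR (rec) covering the entries in [start, end): entries with a real tuple index are
+ // read from this page, while the sentinel index -1 stands for the new tuple being inserted.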
+ public void generateDist(ITupleReference tuple, TupleEntryArrayList entries, Rectangle rec, int start, int end) {
+ int j = 0;
+ while (entries.get(j).getTupleIndex() == -1) {
+ j++;
+ }
+ frameTuple.resetByTupleIndex(this, entries.get(j).getTupleIndex());
+ rec.set(frameTuple);
+ for (int i = start; i < end; ++i) {
+ if (i != j) {
+ if (entries.get(i).getTupleIndex() != -1) {
+ frameTuple.resetByTupleIndex(this, entries.get(i).getTupleIndex());
+ rec.enlarge(frameTuple);
+ } else {
+ rec.enlarge(tuple);
+ }
+ }
+ }
+ }
+
+ @Override
+ public ITreeIndexTupleReference createTupleReference() {
+ return tupleWriter.createTupleReference();
+ }
+
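+ // For every dimension, repoints tuples[j] (lower bound) and tuples[maxFieldPos + j] (upper bound)
+ // at the page tuple holding the extreme value, so that together they span the page's MBR.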
+ public void adjustMBRImpl(ITreeIndexTupleReference[] tuples, MultiComparator cmp) {
+ int maxFieldPos = cmp.getKeyFieldCount() / 2;
+ for (int i = 1; i < getTupleCount(); i++) {
+ frameTuple.resetByTupleIndex(this, i);
+ for (int j = 0; j < maxFieldPos; j++) {
+ int k = maxFieldPos + j;
+ int c = cmp.getComparators()[j].compare(frameTuple.getFieldData(j), frameTuple.getFieldStart(j),
+ frameTuple.getFieldLength(j), tuples[j].getFieldData(j), tuples[j].getFieldStart(j),
+ tuples[j].getFieldLength(j));
+ if (c < 0) {
+ tuples[j].resetByTupleIndex(this, i);
+ }
+ c = cmp.getComparators()[k].compare(frameTuple.getFieldData(k), frameTuple.getFieldStart(k),
+ frameTuple.getFieldLength(k), tuples[k].getFieldData(k), tuples[k].getFieldStart(k),
+ tuples[k].getFieldLength(k));
+ if (c > 0) {
+ tuples[k].resetByTupleIndex(this, i);
+ }
+ }
+ }
+ }
+
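+ // Recomputes this page's MBR via adjustMBR and serializes it into the left half of the split key.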
+ @Override
+ public void computeMBR(ISplitKey splitKey, MultiComparator cmp) {
+ RTreeSplitKey rTreeSplitKey = ((RTreeSplitKey) splitKey);
+ RTreeTypeAwareTupleWriter rTreeTupleWriterLeftFrame = ((RTreeTypeAwareTupleWriter) tupleWriter);
+ frameTuple.setFieldCount(cmp.getFieldCount());
+
+ int tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
+ frameTuple.resetByTupleOffset(buf, tupleOff);
+ int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
+
+ splitKey.initData(splitKeySize);
+ this.adjustMBR(tuples, cmp);
+ rTreeTupleWriterLeftFrame.writeTupleFields(tuples, 0, rTreeSplitKey.getLeftPageBuffer(), 0);
+ rTreeSplitKey.getLeftTuple().resetByTupleOffset(rTreeSplitKey.getLeftPageBuffer(), 0);
+ }
+
+ @Override
+ public int getPageHeaderSize() {
+ return rightPageOff;
+ }
+}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java
new file mode 100644
index 0000000..d03c5e9
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java
@@ -0,0 +1,645 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.frames;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.SlotOffTupleOff;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeFrame;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.EntriesOrder;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.PathList;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeSplitKey;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.UnorderedSlotManager;
+import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriter;
+
+public class RTreeNSMInteriorFrame extends RTreeNSMFrame implements IRTreeInteriorFrame {
+
+ private static final int childPtrSize = 4;
+
+ public RTreeNSMInteriorFrame(ITreeIndexTupleWriter tupleWriter, int keyFieldCount) {
+ super(tupleWriter, keyFieldCount);
+ }
+
+ @Override
+ public String printKeys(MultiComparator cmp, ISerializerDeserializer[] fields) throws HyracksDataException {
+ StringBuilder strBuilder = new StringBuilder();
+ int tupleCount = buf.getInt(tupleCountOff);
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
+ for (int i = 0; i < tupleCount; i++) {
+ frameTuple.resetByTupleIndex(this, i);
+ for (int j = 0; j < cmp.getKeyFieldCount(); j++) {
+ ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(j),
+ frameTuple.getFieldStart(j), frameTuple.getFieldLength(j));
+ DataInput dataIn = new DataInputStream(inStream);
+ Object o = fields[j].deserialize(dataIn);
+ strBuilder.append(o.toString() + " ");
+ }
+ strBuilder.append(" | ");
+ }
+ strBuilder.append("\n");
+ return strBuilder.toString();
+ }
+
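+ // Child selection in the spirit of the R*-tree ChooseSubtree heuristic: when the children are
+ // leaves, pick the entry needing the least overlap enlargement (restricted to the
+ // nearMinimumOverlapFactor entries with the smallest area enlargement), breaking ties by area
+ // enlargement and then by area; otherwise pick the entry with the smallest area enlargement,
+ // breaking ties by area. Leaves frameTuple positioned on the chosen entry and returns true if
+ // that entry's MBR must be enlarged to cover the new tuple.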
+ @Override
+ public boolean findBestChild(ITupleReference tuple, MultiComparator cmp) {
+ cmpFrameTuple.setFieldCount(cmp.getKeyFieldCount());
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
+
+ int bestChild = 0;
+ double minEnlargedArea = Double.MAX_VALUE;
+
+ // the child pointers in this node point to leaves
+ if (getLevel() == 1) {
+ // find least overlap enlargement, use minimum enlarged area to
+ // break tie, if tie still exists use minimum area to break it
+ for (int i = 0; i < getTupleCount(); ++i) {
+ frameTuple.resetByTupleIndex(this, i);
+ double enlargedArea = enlargedArea(frameTuple, tuple, cmp);
+ tupleEntries1.add(i, enlargedArea);
+ if (enlargedArea < minEnlargedArea) {
+ minEnlargedArea = enlargedArea;
+ bestChild = i;
+ }
+ }
+ if (minEnlargedArea < RTreeNSMFrame.doubleEpsilon() || minEnlargedArea > RTreeNSMFrame.doubleEpsilon()) {
+ minEnlargedArea = Double.MAX_VALUE;
+ int k;
+ if (getTupleCount() > nearMinimumOverlapFactor) {
+ // sort the entries based on their area enlargement needed
+ // to include the object
+ tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount());
+ k = nearMinimumOverlapFactor;
+ } else {
+ k = getTupleCount();
+ }
+
+ double minOverlap = Double.MAX_VALUE;
+ int id = 0;
+ for (int i = 0; i < k; ++i) {
+ double difference = 0.0;
+ for (int j = 0; j < getTupleCount(); ++j) {
+ frameTuple.resetByTupleIndex(this, j);
+ cmpFrameTuple.resetByTupleIndex(this, tupleEntries1.get(i).getTupleIndex());
+
+ int c = pointerCmp(frameTuple, cmpFrameTuple, cmp);
+ if (c != 0) {
+ double intersection = overlappedArea(frameTuple, tuple, cmpFrameTuple, cmp);
+ if (intersection != 0.0) {
+ difference += intersection - overlappedArea(frameTuple, null, cmpFrameTuple, cmp);
+ }
+ } else {
+ id = j;
+ }
+ }
+
+ double enlargedArea = enlargedArea(cmpFrameTuple, tuple, cmp);
+ if (difference < minOverlap) {
+ minOverlap = difference;
+ minEnlargedArea = enlargedArea;
+ bestChild = id;
+ } else if (difference == minOverlap) {
+ if (enlargedArea < minEnlargedArea) {
+ minEnlargedArea = enlargedArea;
+ bestChild = id;
+ } else if (enlargedArea == minEnlargedArea) {
+ double area = area(cmpFrameTuple, cmp);
+ frameTuple.resetByTupleIndex(this, bestChild);
+ double minArea = area(frameTuple, cmp);
+ if (area < minArea) {
+ bestChild = id;
+ }
+ }
+ }
+ }
+ }
+ } else { // find minimum enlarged area, use minimum area to break tie
+ for (int i = 0; i < getTupleCount(); i++) {
+ frameTuple.resetByTupleIndex(this, i);
+ double enlargedArea = enlargedArea(frameTuple, tuple, cmp);
+ if (enlargedArea < minEnlargedArea) {
+ minEnlargedArea = enlargedArea;
+ bestChild = i;
+ } else if (enlargedArea == minEnlargedArea) {
+ double area = area(frameTuple, cmp);
+ frameTuple.resetByTupleIndex(this, bestChild);
+ double minArea = area(frameTuple, cmp);
+ if (area < minArea) {
+ bestChild = i;
+ }
+ }
+ }
+ }
+ tupleEntries1.clear();
+
+ frameTuple.resetByTupleIndex(this, bestChild);
+ if (minEnlargedArea > 0.0) {
+ return true;
+ } else {
+ return false;
+ }
+ }
+
+ @Override
+ public int getBestChildPageId(MultiComparator cmp) {
+ return buf.getInt(getChildPointerOff(frameTuple, cmp));
+ }
+
+ @Override
+ public int findTupleByPointer(ITupleReference tuple, MultiComparator cmp) {
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
+ for (int i = 0; i < getTupleCount(); i++) {
+ frameTuple.resetByTupleIndex(this, i);
+ int c = pointerCmp(frameTuple, tuple, cmp);
+ if (c == 0) {
+ return i;
+ }
+ }
+ return -1;
+ }
+
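+ // Returns the child page id stored with the entry at tupleIndex if its MBR intersects the given
+ // tuple's MBR, or -1 if they are disjoint.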
+ @Override
+ public int getChildPageIdIfIntersect(ITupleReference tuple, int tupleIndex, MultiComparator cmp) {
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
+ frameTuple.resetByTupleIndex(this, tupleIndex);
+ int maxFieldPos = cmp.getKeyFieldCount() / 2;
+ for (int i = 0; i < maxFieldPos; i++) {
+ int j = maxFieldPos + i;
+ int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
+ tuple.getFieldLength(i), frameTuple.getFieldData(j), frameTuple.getFieldStart(j),
+ frameTuple.getFieldLength(j));
+ if (c > 0) {
+ return -1;
+ }
+ c = cmp.getComparators()[i].compare(tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j),
+ frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
+ if (c < 0) {
+ return -1;
+ }
+ }
+ // return buf.getInt(frameTuple.getFieldStart(cmp.getKeyFieldCount()));
+ return buf.getInt(getChildPointerOff(frameTuple, cmp));
+ }
+
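+ // Like findTupleByPointer above, but also appends the child page ids of all non-matching entries
+ // to traverseList so the caller can continue the traversal elsewhere.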
+ @Override
+ public int findTupleByPointer(ITupleReference tuple, PathList traverseList, int parentIndex, MultiComparator cmp) {
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
+ for (int i = 0; i < getTupleCount(); i++) {
+ frameTuple.resetByTupleIndex(this, i);
+
+ int c = pointerCmp(frameTuple, tuple, cmp);
+ if (c == 0) {
+ return i;
+ } else {
+ int pageId = IntegerSerializerDeserializer.getInt(frameTuple.getFieldData(cmp.getKeyFieldCount() - 1),
+ getChildPointerOff(frameTuple, cmp));
+ traverseList.add(pageId, -1, parentIndex);
+ }
+ }
+ return -1;
+ }
+
+ @Override
+ public boolean compact(MultiComparator cmp) {
+ resetSpaceParams();
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
+
+ int tupleCount = buf.getInt(tupleCountOff);
+ int freeSpace = buf.getInt(freeSpaceOff);
+
+ ArrayList<SlotOffTupleOff> sortedTupleOffs = new ArrayList<SlotOffTupleOff>();
+ sortedTupleOffs.ensureCapacity(tupleCount);
+ for (int i = 0; i < tupleCount; i++) {
+ int slotOff = slotManager.getSlotOff(i);
+ int tupleOff = slotManager.getTupleOff(slotOff);
+ sortedTupleOffs.add(new SlotOffTupleOff(i, slotOff, tupleOff));
+ }
+ Collections.sort(sortedTupleOffs);
+
+ for (int i = 0; i < sortedTupleOffs.size(); i++) {
+ int tupleOff = sortedTupleOffs.get(i).tupleOff;
+ frameTuple.resetByTupleOffset(buf, tupleOff);
+
+ int tupleEndOff = frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
+ + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1);
+ int tupleLength = tupleEndOff - tupleOff + childPtrSize;
+ System.arraycopy(buf.array(), tupleOff, buf.array(), freeSpace, tupleLength);
+
+ slotManager.setSlot(sortedTupleOffs.get(i).slotOff, freeSpace);
+ freeSpace += tupleLength;
+ }
+
+ buf.putInt(freeSpaceOff, freeSpace);
+ buf.putInt(totalFreeSpaceOff, buf.capacity() - freeSpace - tupleCount * slotManager.getSlotSize());
+
+ return false;
+ }
+
+ @Override
+ public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple, MultiComparator cmp) {
+ int bytesRequired = tupleWriter.bytesRequired(tuple) + childPtrSize; // account for the child pointer
+ if (bytesRequired + slotManager.getSlotSize() <= buf.capacity() - buf.getInt(freeSpaceOff)
+ - (buf.getInt(tupleCountOff) * slotManager.getSlotSize()))
+ return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
+ else if (bytesRequired + slotManager.getSlotSize() <= buf.getInt(totalFreeSpaceOff))
+ return FrameOpSpaceStatus.SUFFICIENT_SPACE;
+ else
+ return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
+ }
+
+ @Override
+ public void adjustKey(ITupleReference tuple, int tupleIndex, MultiComparator cmp) {
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
+ if (tupleIndex == -1) {
+ tupleIndex = findTupleByPointer(tuple, cmp);
+ }
+ if (tupleIndex != -1) {
+ tupleWriter.writeTuple(tuple, buf, getTupleOffset(tupleIndex));
+ }
+ }
+
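+ // Compares the child page pointers stored directly after the key fields of the two tuples.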
+ private int pointerCmp(ITupleReference tupleA, ITupleReference tupleB, MultiComparator cmp) {
+ return cmp.getIntCmp().compare(tupleA.getFieldData(cmp.getKeyFieldCount() - 1),
+ getChildPointerOff(tupleA, cmp), childPtrSize, tupleB.getFieldData(cmp.getKeyFieldCount() - 1),
+ getChildPointerOff(tupleB, cmp), childPtrSize);
+ }
+
+ @Override
+ public int split(ITreeIndexFrame rightFrame, ITupleReference tuple, MultiComparator cmp, ISplitKey splitKey)
+ throws Exception {
+
+ rightFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
+
+ RTreeSplitKey rTreeSplitKey = ((RTreeSplitKey) splitKey);
+ RTreeTypeAwareTupleWriter rTreeTupleWriterLeftFrame = ((RTreeTypeAwareTupleWriter) tupleWriter);
+ RTreeTypeAwareTupleWriter rTreeTupleWriterRightFrame = ((RTreeTypeAwareTupleWriter) rightFrame.getTupleWriter());
+
+ // calculations are based on the R*-tree paper
+ int m = (int) Math.floor((getTupleCount() + 1) * splitFactor);
+ int splitDistribution = getTupleCount() - (2 * m) + 2;
+
+ // to calculate the minimum margin in order to pick the split axis
+ double minMargin = Double.MAX_VALUE;
+ int splitAxis = 0, sortOrder = 0;
+
+ int maxFieldPos = cmp.getKeyFieldCount() / 2;
+ for (int i = 0; i < maxFieldPos; i++) {
+ int j = maxFieldPos + i;
+ for (int k = 0; k < getTupleCount(); ++k) {
+
+ frameTuple.resetByTupleIndex(this, k);
+
+ double LowerKey = DoubleSerializerDeserializer.getDouble(frameTuple.getFieldData(i),
+ frameTuple.getFieldStart(i));
+ double UpperKey = DoubleSerializerDeserializer.getDouble(frameTuple.getFieldData(j),
+ frameTuple.getFieldStart(j));
+
+ tupleEntries1.add(k, LowerKey);
+ tupleEntries2.add(k, UpperKey);
+ }
+ double LowerKey = DoubleSerializerDeserializer.getDouble(tuple.getFieldData(i), tuple.getFieldStart(i));
+ double UpperKey = DoubleSerializerDeserializer.getDouble(tuple.getFieldData(j), tuple.getFieldStart(j));
+
+ tupleEntries1.add(-1, LowerKey);
+ tupleEntries2.add(-1, UpperKey);
+
+ tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
+ tupleEntries2.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
+
+ double lowerMargin = 0.0, upperMargin = 0.0;
+ // generate distribution
+ for (int k = 1; k <= splitDistribution; ++k) {
+ int d = m - 1 + k;
+
+ generateDist(tuple, tupleEntries1, rec[0], 0, d);
+ generateDist(tuple, tupleEntries2, rec[1], 0, d);
+ generateDist(tuple, tupleEntries1, rec[2], d, getTupleCount() + 1);
+ generateDist(tuple, tupleEntries2, rec[3], d, getTupleCount() + 1);
+
+ // calculate the margin of the distributions
+ lowerMargin += rec[0].margin() + rec[2].margin();
+ upperMargin += rec[1].margin() + rec[3].margin();
+ }
+ double margin = Math.min(lowerMargin, upperMargin);
+
+ // store minimum margin as split axis
+ if (margin < minMargin) {
+ minMargin = margin;
+ splitAxis = i;
+ sortOrder = (lowerMargin < upperMargin) ? 0 : 2;
+ }
+
+ tupleEntries1.clear();
+ tupleEntries2.clear();
+ }
+
+ for (int i = 0; i < getTupleCount(); ++i) {
+ frameTuple.resetByTupleIndex(this, i);
+ double key = DoubleSerializerDeserializer.getDouble(frameTuple.getFieldData(splitAxis + sortOrder),
+ frameTuple.getFieldStart(splitAxis + sortOrder));
+ tupleEntries1.add(i, key);
+ }
+ double key = DoubleSerializerDeserializer.getDouble(tuple.getFieldData(splitAxis + sortOrder),
+ tuple.getFieldStart(splitAxis + sortOrder));
+ tupleEntries1.add(-1, key);
+ tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
+
+ double minArea = Double.MAX_VALUE;
+ double minOverlap = Double.MAX_VALUE;
+ int splitPoint = 0;
+ for (int i = 1; i <= splitDistribution; ++i) {
+ int d = m - 1 + i;
+
+ generateDist(tuple, tupleEntries1, rec[0], 0, d);
+ generateDist(tuple, tupleEntries1, rec[2], d, getTupleCount() + 1);
+
+ double overlap = rec[0].overlappedArea(rec[2]);
+ if (overlap < minOverlap) {
+ splitPoint = d;
+ minOverlap = overlap;
+ minArea = rec[0].area() + rec[2].area();
+ } else if (overlap == minOverlap) {
+ double area = rec[0].area() + rec[2].area();
+ if (area < minArea) {
+ splitPoint = d;
+ minArea = area;
+ }
+ }
+ }
+ int startIndex, endIndex;
+ if (splitPoint < (getTupleCount() + 1) / 2) {
+ startIndex = 0;
+ endIndex = splitPoint;
+ } else {
+ startIndex = splitPoint;
+ endIndex = (getTupleCount() + 1);
+ }
+ boolean tupleInserted = false;
+ int totalBytes = 0, numOfDeletedTuples = 0;
+ for (int i = startIndex; i < endIndex; i++) {
+ if (tupleEntries1.get(i).getTupleIndex() != -1) {
+ frameTuple.resetByTupleIndex(this, tupleEntries1.get(i).getTupleIndex());
+ rightFrame.insert(frameTuple, cmp, -1);
+ ((UnorderedSlotManager) slotManager).modifySlot(
+ slotManager.getSlotOff(tupleEntries1.get(i).getTupleIndex()), -1);
+ totalBytes += tupleWriter.bytesRequired(frameTuple) + childPtrSize;
+ numOfDeletedTuples++;
+ } else {
+ rightFrame.insert(tuple, cmp, -1);
+ tupleInserted = true;
+ }
+ }
+
+ ((UnorderedSlotManager) slotManager).deleteEmptySlots();
+
+ // maintain space information
+ buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + totalBytes
+ + (slotManager.getSlotSize() * numOfDeletedTuples));
+
+ // compact both pages
+ rightFrame.compact(cmp);
+ compact(cmp);
+
+ if (!tupleInserted) {
+ insert(tuple, cmp, -1);
+ }
+
+ int tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
+ frameTuple.resetByTupleOffset(buf, tupleOff);
+ int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
+
+ splitKey.initData(splitKeySize);
+ this.adjustMBR(tuples, cmp);
+ rTreeTupleWriterLeftFrame.writeTupleFields(tuples, 0, rTreeSplitKey.getLeftPageBuffer(), 0);
+ rTreeSplitKey.getLeftTuple().resetByTupleOffset(rTreeSplitKey.getLeftPageBuffer(), 0);
+
+ ((IRTreeFrame) rightFrame).adjustMBR(((RTreeNSMFrame) rightFrame).getTuples(), cmp);
+ rTreeTupleWriterRightFrame.writeTupleFields(((RTreeNSMFrame) rightFrame).getTuples(), 0,
+ rTreeSplitKey.getRightPageBuffer(), 0);
+ rTreeSplitKey.getRightTuple().resetByTupleOffset(rTreeSplitKey.getRightPageBuffer(), 0);
+
+ tupleEntries1.clear();
+ tupleEntries2.clear();
+ return 0;
+ }
+
+ private int getChildPointerOff(ITupleReference tuple, MultiComparator cmp) {
+ return tuple.getFieldStart(cmp.getKeyFieldCount() - 1) + tuple.getFieldLength(cmp.getKeyFieldCount() - 1);
+ }
+
+ @Override
+ public void insert(ITupleReference tuple, MultiComparator cmp, int tupleIndex) throws Exception {
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
+ slotManager.insertSlot(-1, buf.getInt(freeSpaceOff));
+ int freeSpace = buf.getInt(freeSpaceOff);
+ int bytesWritten = tupleWriter.writeTupleFields(tuple, 0, cmp.getKeyFieldCount(), buf, freeSpace);
+ System.arraycopy(tuple.getFieldData(cmp.getKeyFieldCount() - 1), getChildPointerOff(tuple, cmp), buf.array(),
+ freeSpace + bytesWritten, childPtrSize);
+ int tupleSize = bytesWritten + childPtrSize;
+
+ buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
+ buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + tupleSize);
+ buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - tupleSize - slotManager.getSlotSize());
+
+ }
+
+ @Override
+ public void delete(int tupleIndex, MultiComparator cmp) throws Exception {
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
+ int slotOff = slotManager.getSlotOff(tupleIndex);
+
+ int tupleOff = slotManager.getTupleOff(slotOff);
+ frameTuple.resetByTupleOffset(buf, tupleOff);
+ int tupleSize = tupleWriter.bytesRequired(frameTuple);
+
+ // perform deletion (we just do a memcpy to overwrite the slot)
+ int slotStartOff = slotManager.getSlotEndOff();
+ int length = slotOff - slotStartOff;
+ System.arraycopy(buf.array(), slotStartOff, buf.array(), slotStartOff + slotManager.getSlotSize(), length);
+
+ // maintain space information
+ buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
+ buf.putInt(totalFreeSpaceOff,
+ buf.getInt(totalFreeSpaceOff) + tupleSize + childPtrSize + slotManager.getSlotSize());
+ }
+
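+ // Returns true if the key of the entry at tupleIndex differs from the given MBR in any dimension,
+ // i.e. the parent entry needs to be rewritten.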
+ @Override
+ public boolean recomputeMBR(ITupleReference tuple, int tupleIndex, MultiComparator cmp) {
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
+ frameTuple.resetByTupleIndex(this, tupleIndex);
+
+ int maxFieldPos = cmp.getKeyFieldCount() / 2;
+ for (int i = 0; i < maxFieldPos; i++) {
+ int j = maxFieldPos + i;
+ int c = cmp.getComparators()[i].compare(frameTuple.getFieldData(i), frameTuple.getFieldStart(i),
+ frameTuple.getFieldLength(i), tuple.getFieldData(i), tuple.getFieldStart(i),
+ tuple.getFieldLength(i));
+ if (c != 0) {
+ return true;
+ }
+ c = cmp.getComparators()[j].compare(frameTuple.getFieldData(j), frameTuple.getFieldStart(j),
+ frameTuple.getFieldLength(j), tuple.getFieldData(j), tuple.getFieldStart(j),
+ tuple.getFieldLength(j));
+
+ if (c != 0) {
+ return true;
+ }
+ }
+ return false;
+ }
+
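+ // Area of the intersection between tuple1's MBR (enlarged by tupleToBeInserted when it is
+ // non-null) and tuple2's MBR; returns 0.0 if they do not overlap.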
+ private double overlappedArea(ITupleReference tuple1, ITupleReference tupleToBeInserted, ITupleReference tuple2,
+ MultiComparator cmp) {
+ double area = 1.0;
+ double f1, f2;
+
+ int maxFieldPos = cmp.getKeyFieldCount() / 2;
+ for (int i = 0; i < maxFieldPos; i++) {
+ int j = maxFieldPos + i;
+ double pHigh1, pLow1;
+ if (tupleToBeInserted != null) {
+ int c = cmp.getComparators()[i].compare(tuple1.getFieldData(i), tuple1.getFieldStart(i),
+ tuple1.getFieldLength(i), tupleToBeInserted.getFieldData(i),
+ tupleToBeInserted.getFieldStart(i), tupleToBeInserted.getFieldLength(i));
+ if (c < 0) {
+ pLow1 = DoubleSerializerDeserializer.getDouble(tuple1.getFieldData(i), tuple1.getFieldStart(i));
+ } else {
+ pLow1 = DoubleSerializerDeserializer.getDouble(tupleToBeInserted.getFieldData(i),
+ tupleToBeInserted.getFieldStart(i));
+ }
+
+ c = cmp.getComparators()[j].compare(tuple1.getFieldData(j), tuple1.getFieldStart(j),
+ tuple1.getFieldLength(j), tupleToBeInserted.getFieldData(j),
+ tupleToBeInserted.getFieldStart(j), tupleToBeInserted.getFieldLength(j));
+ if (c > 0) {
+ pHigh1 = DoubleSerializerDeserializer.getDouble(tuple1.getFieldData(j), tuple1.getFieldStart(j));
+ } else {
+ pHigh1 = DoubleSerializerDeserializer.getDouble(tupleToBeInserted.getFieldData(j),
+ tupleToBeInserted.getFieldStart(j));
+ }
+ } else {
+ pLow1 = DoubleSerializerDeserializer.getDouble(tuple1.getFieldData(i), tuple1.getFieldStart(i));
+ pHigh1 = DoubleSerializerDeserializer.getDouble(tuple1.getFieldData(j), tuple1.getFieldStart(j));
+ }
+
+ double pLow2 = DoubleSerializerDeserializer.getDouble(tuple2.getFieldData(i), tuple2.getFieldStart(i));
+ double pHigh2 = DoubleSerializerDeserializer.getDouble(tuple2.getFieldData(j), tuple2.getFieldStart(j));
+
+ if (pLow1 > pHigh2 || pHigh1 < pLow2) {
+ return 0.0;
+ }
+
+ f1 = Math.max(pLow1, pLow2);
+ f2 = Math.min(pHigh1, pHigh2);
+ area *= f2 - f1;
+ }
+ return area;
+ }
+
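+ // Extra area required to grow the tuple's MBR so that it also covers tupleToBeInserted.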
+ private double enlargedArea(ITupleReference tuple, ITupleReference tupleToBeInserted, MultiComparator cmp) {
+ double areaBeforeEnlarge = area(tuple, cmp);
+ double areaAfterEnlarge = 1.0;
+
+ int maxFieldPos = cmp.getKeyFieldCount() / 2;
+ for (int i = 0; i < maxFieldPos; i++) {
+ int j = maxFieldPos + i;
+ double pHigh, pLow;
+ int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
+ tuple.getFieldLength(i), tupleToBeInserted.getFieldData(i), tupleToBeInserted.getFieldStart(i),
+ tupleToBeInserted.getFieldLength(i));
+ if (c < 0) {
+ pLow = DoubleSerializerDeserializer.getDouble(tuple.getFieldData(i), tuple.getFieldStart(i));
+ } else {
+ pLow = DoubleSerializerDeserializer.getDouble(tupleToBeInserted.getFieldData(i),
+ tupleToBeInserted.getFieldStart(i));
+ }
+
+ c = cmp.getComparators()[j].compare(tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j),
+ tupleToBeInserted.getFieldData(j), tupleToBeInserted.getFieldStart(j),
+ tupleToBeInserted.getFieldLength(j));
+ if (c > 0) {
+ pHigh = DoubleSerializerDeserializer.getDouble(tuple.getFieldData(j), tuple.getFieldStart(j));
+ } else {
+ pHigh = DoubleSerializerDeserializer.getDouble(tupleToBeInserted.getFieldData(j),
+ tupleToBeInserted.getFieldStart(j));
+ }
+ areaAfterEnlarge *= pHigh - pLow;
+ }
+ return areaAfterEnlarge - areaBeforeEnlarge;
+ }
+
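+ // Area of the tuple's MBR, i.e. the product of its per-dimension extents.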
+ private double area(ITupleReference tuple, MultiComparator cmp) {
+ double area = 1.0;
+ int maxFieldPos = cmp.getKeyFieldCount() / 2;
+ for (int i = 0; i < maxFieldPos; i++) {
+ int j = maxFieldPos + i;
+ area *= DoubleSerializerDeserializer.getDouble(tuple.getFieldData(j), tuple.getFieldStart(j))
+ - DoubleSerializerDeserializer.getDouble(tuple.getFieldData(i), tuple.getFieldStart(i));
+ }
+ return area;
+ }
+
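+ // Widens the entry currently referenced by frameTuple in place so that its MBR also covers the
+ // given tuple.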
+ @Override
+ public void enlarge(ITupleReference tuple, MultiComparator cmp) {
+ int maxFieldPos = cmp.getKeyFieldCount() / 2;
+ for (int i = 0; i < maxFieldPos; i++) {
+ int j = maxFieldPos + i;
+ int c = cmp.getComparators()[i].compare(frameTuple.getFieldData(i), frameTuple.getFieldStart(i),
+ frameTuple.getFieldLength(i), tuple.getFieldData(i), tuple.getFieldStart(i),
+ tuple.getFieldLength(i));
+ if (c > 0) {
+ System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), frameTuple.getFieldData(i),
+ frameTuple.getFieldStart(i), tuple.getFieldLength(i));
+ }
+ c = cmp.getComparators()[j].compare(frameTuple.getFieldData(j), frameTuple.getFieldStart(j),
+ frameTuple.getFieldLength(j), tuple.getFieldData(j), tuple.getFieldStart(j),
+ tuple.getFieldLength(j));
+ if (c < 0) {
+ System.arraycopy(tuple.getFieldData(j), tuple.getFieldStart(j), frameTuple.getFieldData(j),
+ frameTuple.getFieldStart(j), tuple.getFieldLength(j));
+ }
+ }
+ }
+
+ @Override
+ public void adjustMBR(ITreeIndexTupleReference[] tuples, MultiComparator cmp) {
+ for (int i = 0; i < tuples.length; i++) {
+ tuples[i].setFieldCount(cmp.getKeyFieldCount());
+ tuples[i].resetByTupleIndex(this, 0);
+ }
+
+ adjustMBRImpl(tuples, cmp);
+ }
+}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java
new file mode 100644
index 0000000..9fd5a54
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.frames;
+
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
+
+public class RTreeNSMInteriorFrameFactory implements ITreeIndexFrameFactory {
+
+ private static final long serialVersionUID = 1L;
+ private ITreeIndexTupleWriterFactory tupleWriterFactory;
+ private int keyFieldCount;
+
+ public RTreeNSMInteriorFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory, int keyFieldCount) {
+ this.tupleWriterFactory = tupleWriterFactory;
+ this.keyFieldCount = keyFieldCount;
+ }
+
+ @Override
+ public IRTreeInteriorFrame createFrame() {
+ return new RTreeNSMInteriorFrame(tupleWriterFactory.createTupleWriter(), keyFieldCount);
+ }
+}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java
new file mode 100644
index 0000000..7ebe974
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java
@@ -0,0 +1,266 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.frames;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeFrame;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.EntriesOrder;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeSplitKey;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.UnorderedSlotManager;
+import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriter;
+
+public class RTreeNSMLeafFrame extends RTreeNSMFrame implements IRTreeLeafFrame {
+
+ public RTreeNSMLeafFrame(ITreeIndexTupleWriter tupleWriter, int keyFieldCount) {
+ super(tupleWriter, keyFieldCount);
+ }
+
+ @Override
+ public int findTupleIndex(ITupleReference tuple, MultiComparator cmp) {
+ frameTuple.setFieldCount(cmp.getFieldCount());
+ return slotManager.findTupleIndex(tuple, frameTuple, cmp, null, null);
+ }
+
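+ // Returns true if the query MBR (tuple) and the MBR of the entry at tupleIndex overlap in every
+ // dimension.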
+ @Override
+ public boolean intersect(ITupleReference tuple, int tupleIndex, MultiComparator cmp) {
+ frameTuple.setFieldCount(cmp.getFieldCount());
+ frameTuple.resetByTupleIndex(this, tupleIndex);
+ int maxFieldPos = cmp.getKeyFieldCount() / 2;
+ for (int i = 0; i < maxFieldPos; i++) {
+ int j = maxFieldPos + i;
+ int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
+ tuple.getFieldLength(i), frameTuple.getFieldData(j), frameTuple.getFieldStart(j),
+ frameTuple.getFieldLength(j));
+ if (c > 0) {
+ return false;
+ }
+ c = cmp.getComparators()[i].compare(tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j),
+ frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
+
+ if (c < 0) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ @Override
+ public int split(ITreeIndexFrame rightFrame, ITupleReference tuple, MultiComparator cmp, ISplitKey splitKey)
+ throws Exception {
+
+ rightFrame.setPageTupleFieldCount(cmp.getFieldCount());
+ frameTuple.setFieldCount(cmp.getFieldCount());
+
+ RTreeSplitKey rTreeSplitKey = ((RTreeSplitKey) splitKey);
+ RTreeTypeAwareTupleWriter rTreeTupleWriterLeftFrame = ((RTreeTypeAwareTupleWriter) tupleWriter);
+ RTreeTypeAwareTupleWriter rTreeTupleWriterRightFrame = ((RTreeTypeAwareTupleWriter) rightFrame.getTupleWriter());
+
+ // calculations are based on the R*-tree paper
+ int m = (int) Math.floor((getTupleCount() + 1) * splitFactor);
+ int splitDistribution = getTupleCount() - (2 * m) + 2;
+
+ // to calculate the minimum margin in order to pick the split axis
+ double minMargin = Double.MAX_VALUE;
+ int splitAxis = 0, sortOrder = 0;
+
+ int maxFieldPos = cmp.getKeyFieldCount() / 2;
+ for (int i = 0; i < maxFieldPos; i++) {
+ int j = maxFieldPos + i;
+ for (int k = 0; k < getTupleCount(); ++k) {
+
+ frameTuple.resetByTupleIndex(this, k);
+
+ double LowerKey = DoubleSerializerDeserializer.getDouble(frameTuple.getFieldData(i),
+ frameTuple.getFieldStart(i));
+ double UpperKey = DoubleSerializerDeserializer.getDouble(frameTuple.getFieldData(j),
+ frameTuple.getFieldStart(j));
+
+ tupleEntries1.add(k, LowerKey);
+ tupleEntries2.add(k, UpperKey);
+ }
+ double LowerKey = DoubleSerializerDeserializer.getDouble(tuple.getFieldData(i), tuple.getFieldStart(i));
+ double UpperKey = DoubleSerializerDeserializer.getDouble(tuple.getFieldData(j), tuple.getFieldStart(j));
+
+ tupleEntries1.add(-1, LowerKey);
+ tupleEntries2.add(-1, UpperKey);
+
+ tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
+ tupleEntries2.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
+
+ double lowerMargin = 0.0, upperMargin = 0.0;
+ // generate distribution
+ for (int k = 1; k <= splitDistribution; ++k) {
+ int d = m - 1 + k;
+
+ generateDist(tuple, tupleEntries1, rec[0], 0, d);
+ generateDist(tuple, tupleEntries2, rec[1], 0, d);
+ generateDist(tuple, tupleEntries1, rec[2], d, getTupleCount() + 1);
+ generateDist(tuple, tupleEntries2, rec[3], d, getTupleCount() + 1);
+
+ // calculate the margin of the distributions
+ lowerMargin += rec[0].margin() + rec[2].margin();
+ upperMargin += rec[1].margin() + rec[3].margin();
+ }
+ double margin = Math.min(lowerMargin, upperMargin);
+
+ // store minimum margin as split axis
+ if (margin < minMargin) {
+ minMargin = margin;
+ splitAxis = i;
+ sortOrder = (lowerMargin < upperMargin) ? 0 : 2;
+ }
+
+ tupleEntries1.clear();
+ tupleEntries2.clear();
+ }
+
+ for (int i = 0; i < getTupleCount(); ++i) {
+ frameTuple.resetByTupleIndex(this, i);
+ double key = DoubleSerializerDeserializer.getDouble(frameTuple.getFieldData(splitAxis + sortOrder),
+ frameTuple.getFieldStart(splitAxis + sortOrder));
+ tupleEntries1.add(i, key);
+ }
+ double key = DoubleSerializerDeserializer.getDouble(tuple.getFieldData(splitAxis + sortOrder),
+ tuple.getFieldStart(splitAxis + sortOrder));
+ tupleEntries1.add(-1, key);
+ tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
+
+ double minArea = Double.MAX_VALUE;
+ double minOverlap = Double.MAX_VALUE;
+ int splitPoint = 0;
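+ // along the chosen split axis, pick the distribution with the minimum
+ // overlap between the two groups, breaking ties by minimum total area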
+ for (int i = 1; i <= splitDistribution; ++i) {
+ int d = m - 1 + i;
+
+ generateDist(tuple, tupleEntries1, rec[0], 0, d);
+ generateDist(tuple, tupleEntries1, rec[2], d, getTupleCount() + 1);
+
+ double overlap = rec[0].overlappedArea(rec[2]);
+ if (overlap < minOverlap) {
+ splitPoint = d;
+ minOverlap = overlap;
+ minArea = rec[0].area() + rec[2].area();
+ } else if (overlap == minOverlap) {
+ double area = rec[0].area() + rec[2].area();
+ if (area < minArea) {
+ splitPoint = d;
+ minArea = area;
+ }
+ }
+ }
+ int startIndex, endIndex;
+ if (splitPoint < (getTupleCount() + 1) / 2) {
+ startIndex = 0;
+ endIndex = splitPoint;
+ } else {
+ startIndex = splitPoint;
+ endIndex = (getTupleCount() + 1);
+ }
+ boolean tupleInserted = false;
+ int totalBytes = 0, numOfDeletedTuples = 0;
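+ // move the entries of the smaller group (possibly including the new
+ // tuple) to the right frame and mark their slots here as deleted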
+ for (int i = startIndex; i < endIndex; i++) {
+ if (tupleEntries1.get(i).getTupleIndex() != -1) {
+ frameTuple.resetByTupleIndex(this, tupleEntries1.get(i).getTupleIndex());
+ rightFrame.insert(frameTuple, cmp, -1);
+ ((UnorderedSlotManager) slotManager).modifySlot(
+ slotManager.getSlotOff(tupleEntries1.get(i).getTupleIndex()), -1);
+ totalBytes += tupleWriter.bytesRequired(frameTuple);
+ numOfDeletedTuples++;
+ } else {
+ rightFrame.insert(tuple, cmp, -1);
+ tupleInserted = true;
+ }
+ }
+
+ ((UnorderedSlotManager) slotManager).deleteEmptySlots();
+
+ // maintain space information
+ buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + totalBytes
+ + (slotManager.getSlotSize() * numOfDeletedTuples));
+
+ // compact both pages
+ rightFrame.compact(cmp);
+ compact(cmp);
+
+ if (!tupleInserted) {
+ insert(tuple, cmp, -1);
+ }
+
+ int tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
+ frameTuple.resetByTupleOffset(buf, tupleOff);
+ int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
+
+ splitKey.initData(splitKeySize);
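+ // recompute the MBR of each frame and write it into the corresponding
+ // half of the split key so that the parent entries can be updated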
+ this.adjustMBR(tuples, cmp);
+ rTreeTupleWriterLeftFrame.writeTupleFields(tuples, 0, rTreeSplitKey.getLeftPageBuffer(), 0);
+ rTreeSplitKey.getLeftTuple().resetByTupleOffset(rTreeSplitKey.getLeftPageBuffer(), 0);
+
+ ((IRTreeFrame) rightFrame).adjustMBR(((RTreeNSMFrame) rightFrame).getTuples(), cmp);
+ rTreeTupleWriterRightFrame.writeTupleFields(((RTreeNSMFrame) rightFrame).getTuples(), 0,
+ rTreeSplitKey.getRightPageBuffer(), 0);
+ rTreeSplitKey.getRightTuple().resetByTupleOffset(rTreeSplitKey.getRightPageBuffer(), 0);
+
+ tupleEntries1.clear();
+ tupleEntries2.clear();
+ return 0;
+ }
+
+ @Override
+ public void insert(ITupleReference tuple, MultiComparator cmp, int tupleIndex) throws Exception {
+ frameTuple.setFieldCount(cmp.getFieldCount());
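+ // append the tuple at the free-space offset, add a slot for it, and
+ // update the tuple count and free-space bookkeeping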
+ slotManager.insertSlot(-1, buf.getInt(freeSpaceOff));
+ int bytesWritten = tupleWriter.writeTuple(tuple, buf, buf.getInt(freeSpaceOff));
+
+ buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
+ buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
+ buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
+ }
+
+ @Override
+ public void delete(int tupleIndex, MultiComparator cmp) throws Exception {
+ frameTuple.setFieldCount(cmp.getFieldCount());
+ int slotOff = slotManager.getSlotOff(tupleIndex);
+
+ int tupleOff = slotManager.getTupleOff(slotOff);
+ frameTuple.resetByTupleOffset(buf, tupleOff);
+ int tupleSize = tupleWriter.bytesRequired(frameTuple);
+
+ // perform the deletion by shifting the slots to overwrite the deleted slot's entry
+ int slotStartOff = slotManager.getSlotEndOff();
+ int length = slotOff - slotStartOff;
+ System.arraycopy(buf.array(), slotStartOff, buf.array(), slotStartOff + slotManager.getSlotSize(), length);
+
+ // maintain space information
+ buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
+ buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + tupleSize + slotManager.getSlotSize());
+ }
+
+ @Override
+ public void adjustMBR(ITreeIndexTupleReference[] tuples, MultiComparator cmp) {
+ for (int i = 0; i < tuples.length; i++) {
+ tuples[i].setFieldCount(cmp.getFieldCount());
+ tuples[i].resetByTupleIndex(this, 0);
+ }
+
+ adjustMBRImpl(tuples, cmp);
+ }
+}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java
new file mode 100644
index 0000000..7da7f26
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.frames;
+
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
+
+public class RTreeNSMLeafFrameFactory implements ITreeIndexFrameFactory {
+
+ private static final long serialVersionUID = 1L;
+ private ITreeIndexTupleWriterFactory tupleWriterFactory;
+ private int keyFieldCount;
+
+ public RTreeNSMLeafFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory, int keyFieldCount) {
+ this.tupleWriterFactory = tupleWriterFactory;
+ this.keyFieldCount = keyFieldCount;
+ }
+
+ @Override
+ public IRTreeLeafFrame createFrame() {
+ return new RTreeNSMLeafFrame(tupleWriterFactory.createTupleWriter(), keyFieldCount);
+ }
+}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOp.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/EntriesOrder.java
similarity index 83%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOp.java
rename to hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/EntriesOrder.java
index d698c8a..8dce1ac 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOp.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/EntriesOrder.java
@@ -13,8 +13,8 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.impls;
+package edu.uci.ics.hyracks.storage.am.rtree.impls;
-public enum BTreeOp {
- BTO_INSERT, BTO_DELETE, BTO_UPDATE, BTO_SEARCH
+public enum EntriesOrder {
+ ASCENDING, DESCENDING
}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/PathList.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/PathList.java
new file mode 100644
index 0000000..58bad69
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/PathList.java
@@ -0,0 +1,110 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.impls;
+
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IntArrayList;
+
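+// Three parallel int lists that record, for each visited page, its page id,
+// the page LSN observed at visit time, and an auxiliary index (e.g. the
+// position of the parent entry when the list is used for traversal).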
+public class PathList {
+ private IntArrayList pageIds;
+ private IntArrayList pageLsns;
+ private IntArrayList pageIndexes;
+
+ public PathList(int initialCapacity, int growth) {
+ pageIds = new IntArrayList(initialCapacity, growth);
+ pageLsns = new IntArrayList(initialCapacity, growth);
+ pageIndexes = new IntArrayList(initialCapacity, growth);
+ }
+
+ public int size() {
+ return pageIds.size();
+ }
+
+ public int first() {
+ return pageIds.first();
+ }
+
+ public void add(int pageId, int pageLsn, int pageIndex) {
+ pageIds.add(pageId);
+ pageLsns.add(pageLsn);
+ pageIndexes.add(pageIndex);
+ }
+
+ public int getFirstPageId() {
+ return pageIds.getFirst();
+ }
+
+ public int getFirstPageLsn() {
+ return pageLsns.getFirst();
+ }
+
+ public int getFirstPageIndex() {
+ return pageIndexes.getFirst();
+ }
+
+ public int getLastPageId() {
+ return pageIds.getLast();
+ }
+
+ public int getLastPageLsn() {
+ return pageLsns.getLast();
+ }
+
+ public int getLastPageIndex() {
+ return pageIndexes.getLast();
+ }
+
+ public int getPageId(int i) {
+ return pageIds.get(i);
+ }
+
+ public int getPageLsn(int i) {
+ return pageLsns.get(i);
+ }
+
+ public int getPageIndex(int i) {
+ return pageIndexes.get(i);
+ }
+
+ public void setPageLsn(int i, int pageLsn) {
+ pageLsns.set(i, pageLsn);
+ }
+
+ public void moveFirst() {
+ pageIds.moveFirst();
+ pageLsns.moveFirst();
+ pageIndexes.moveFirst();
+ }
+
+ public void moveLast() {
+ pageIds.removeLast();
+ pageLsns.removeLast();
+ pageIndexes.removeLast();
+ }
+
+ public boolean isLast() {
+ return pageIds.isLast();
+ }
+
+ public void clear() {
+ pageIds.clear();
+ pageLsns.clear();
+ pageIndexes.clear();
+ }
+
+ public boolean isEmpty() {
+ return pageIds.isEmpty();
+ }
+}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java
new file mode 100644
index 0000000..cf74ff9
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java
@@ -0,0 +1,938 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.impls;
+
+import java.util.ArrayList;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.IndexType;
+import edu.uci.ics.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
+import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOpContext;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeFrame;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMFrame;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
+
+public class RTree implements ITreeIndex {
+
+ private boolean created = false;
+ private boolean loaded = false;
+ private final int rootPage = 1; // the root page never changes
+
+ private final AtomicInteger globalNsn; // Global node sequence number
+ private int numOfPages = 1;
+ private final ReadWriteLock treeLatch;
+
+ private final IFreePageManager freePageManager;
+ private final IBufferCache bufferCache;
+ private int fileId;
+
+ private final SearchPredicate diskOrderScanPredicate;
+ private final ITreeIndexFrameFactory interiorFrameFactory;
+ private final ITreeIndexFrameFactory leafFrameFactory;
+ private final MultiComparator cmp;
+
+ public int rootSplits = 0;
+ public int[] splitsByLevel = new int[500];
+ public AtomicLong readLatchesAcquired = new AtomicLong();
+ public AtomicLong readLatchesReleased = new AtomicLong();
+ public AtomicLong writeLatchesAcquired = new AtomicLong();
+ public AtomicLong writeLatchesReleased = new AtomicLong();
+ public AtomicLong pins = new AtomicLong();
+ public AtomicLong unpins = new AtomicLong();
+ public byte currentLevel = 0;
+
+ public RTree(IBufferCache bufferCache, IFreePageManager freePageManager,
+ ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory leafFrameFactory, MultiComparator cmp) {
+ this.bufferCache = bufferCache;
+ this.freePageManager = freePageManager;
+ this.interiorFrameFactory = interiorFrameFactory;
+ this.leafFrameFactory = leafFrameFactory;
+ this.cmp = cmp;
+ globalNsn = new AtomicInteger();
+ this.treeLatch = new ReentrantReadWriteLock(true);
+ this.diskOrderScanPredicate = new SearchPredicate(null, cmp);
+ }
+
+ public void incrementGlobalNsn() {
+ globalNsn.incrementAndGet();
+ }
+
+ public int getGlobalNsn() {
+ return globalNsn.get();
+ }
+
+ public void incrementReadLatchesAcquired() {
+ readLatchesAcquired.incrementAndGet();
+ }
+
+ public void incrementReadLatchesReleased() {
+ readLatchesReleased.incrementAndGet();
+ }
+
+ public void incrementWriteLatchesAcquired() {
+ writeLatchesAcquired.incrementAndGet();
+ }
+
+ public void incrementWriteLatchesReleased() {
+ writeLatchesReleased.incrementAndGet();
+ }
+
+ public void incrementPins() {
+ pins.incrementAndGet();
+ }
+
+ public void incrementUnpins() {
+ unpins.incrementAndGet();
+ }
+
+ public String printStats() {
+ StringBuilder strBuilder = new StringBuilder();
+ strBuilder.append("\n");
+ strBuilder.append("ROOTSPLITS: " + rootSplits + "\n");
+ strBuilder.append("SPLITS BY LEVEL\n");
+ for (int i = 0; i < currentLevel; i++) {
+ strBuilder.append(String.format("%3d ", i) + String.format("%8d ", splitsByLevel[i]) + "\n");
+ }
+ strBuilder.append(String.format("READ LATCHES: %10d %10d\n", readLatchesAcquired.get(),
+ readLatchesReleased.get()));
+ strBuilder.append(String.format("WRITE LATCHES: %10d %10d\n", writeLatchesAcquired.get(),
+ writeLatchesReleased.get()));
+ strBuilder.append(String.format("PINS: %10d %10d\n", pins.get(), unpins.get()));
+
+ strBuilder.append(String.format("Num of Pages: %10d\n", numOfPages));
+
+ return strBuilder.toString();
+ }
+
+ public void printTree(IRTreeFrame leafFrame, IRTreeFrame interiorFrame, ISerializerDeserializer[] fields)
+ throws Exception {
+ printTree(rootPage, null, false, leafFrame, interiorFrame, fields);
+ }
+
+ public void printTree(int pageId, ICachedPage parent, boolean unpin, IRTreeFrame leafFrame,
+ IRTreeFrame interiorFrame, ISerializerDeserializer[] fields) throws Exception {
+
+ ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+ incrementPins();
+ node.acquireReadLatch();
+ incrementReadLatchesAcquired();
+
+ try {
+ if (parent != null && unpin) {
+ parent.releaseReadLatch();
+ incrementReadLatchesReleased();
+ bufferCache.unpin(parent);
+ incrementUnpins();
+ }
+
+ interiorFrame.setPage(node);
+ int level = interiorFrame.getLevel();
+
+ System.out.format("%1d ", level);
+ System.out.format("%3d ", pageId);
+ for (int i = 0; i < currentLevel - level; i++)
+ System.out.format(" ");
+
+ String keyString;
+ if (interiorFrame.isLeaf()) {
+ leafFrame.setPage(node);
+ keyString = leafFrame.printKeys(cmp, fields);
+ } else {
+ keyString = interiorFrame.printKeys(cmp, fields);
+ }
+
+ System.out.format("%s", keyString);
+ if (!interiorFrame.isLeaf()) {
+ ArrayList<Integer> children = ((RTreeNSMFrame) (interiorFrame)).getChildren(cmp);
+ for (int i = 0; i < children.size(); i++) {
+ printTree(children.get(i), node, i == children.size() - 1, leafFrame, interiorFrame, fields);
+ }
+ } else {
+ node.releaseReadLatch();
+ incrementReadLatchesReleased();
+ bufferCache.unpin(node);
+ incrementUnpins();
+ }
+ } catch (Exception e) {
+ node.releaseReadLatch();
+ incrementReadLatchesReleased();
+ bufferCache.unpin(node);
+ incrementUnpins();
+ throw e;
+ }
+ }
+
+ @Override
+ public void create(int fileId, ITreeIndexFrame leafFrame, ITreeIndexMetaDataFrame metaFrame) throws Exception {
+ if (created)
+ return;
+
+ treeLatch.writeLock().lock();
+ try {
+ // check if another thread beat us to it
+ if (created)
+ return;
+
+ freePageManager.init(metaFrame, rootPage);
+
+ // initialize root page
+ ICachedPage rootNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rootPage), true);
+ incrementPins();
+
+ rootNode.acquireWriteLatch();
+ incrementWriteLatchesAcquired();
+ try {
+ leafFrame.setPage(rootNode);
+ leafFrame.initBuffer((byte) 0);
+ } finally {
+ rootNode.releaseWriteLatch();
+ incrementWriteLatchesReleased();
+ bufferCache.unpin(rootNode);
+ incrementUnpins();
+ }
+ currentLevel = 0;
+
+ created = true;
+ } finally {
+ treeLatch.writeLock().unlock();
+ }
+ }
+
+ public void open(int fileId) {
+ this.fileId = fileId;
+ }
+
+ public void close() {
+ fileId = -1;
+ }
+
+ @Override
+ public RTreeOpContext createOpContext(IndexOp op, ITreeIndexFrame leafFrame, ITreeIndexFrame interiorFrame,
+ ITreeIndexMetaDataFrame metaFrame) {
+ return new RTreeOpContext(op, (IRTreeLeafFrame) leafFrame, (IRTreeInteriorFrame) interiorFrame, metaFrame, 8);
+ }
+
+ @Override
+ public void insert(ITupleReference tuple, IndexOpContext ictx) throws Exception {
+ RTreeOpContext ctx = (RTreeOpContext) ictx;
+ ctx.reset();
+ ctx.setTuple(tuple);
+ ctx.splitKey.reset();
+ ctx.splitKey.getLeftTuple().setFieldCount(cmp.getKeyFieldCount());
+ ctx.splitKey.getRightTuple().setFieldCount(cmp.getKeyFieldCount());
+ ctx.interiorFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
+ ctx.leafFrame.setPageTupleFieldCount(cmp.getFieldCount());
+
+ ICachedPage leafNode = findLeaf(ctx);
+
+ int pageId = ctx.pathList.getLastPageId();
+ ctx.pathList.moveLast();
+ insertTuple(leafNode, pageId, ctx.getTuple(), ctx, true);
+
+ while (true) {
+ if (ctx.splitKey.getLeftPageBuffer() != null) {
+ updateParentForInsert(ctx);
+ } else {
+ break;
+ }
+ }
+
+ leafNode.releaseWriteLatch();
+ incrementWriteLatchesReleased();
+ bufferCache.unpin(leafNode);
+ incrementUnpins();
+ }
+
+ public ICachedPage findLeaf(RTreeOpContext ctx) throws Exception {
+ int pageId = rootPage;
+ boolean writeLatched = false;
+ ICachedPage node = null;
+ boolean isLeaf = false;
+ int pageLsn = 0, parentLsn = 0;
+
+ while (true) {
+ if (!writeLatched) {
+ node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+ incrementPins();
+ ctx.interiorFrame.setPage(node);
+ isLeaf = ctx.interiorFrame.isLeaf();
+ if (isLeaf) {
+ node.acquireWriteLatch();
+ incrementWriteLatchesAcquired();
+ writeLatched = true;
+
+ if (!ctx.interiorFrame.isLeaf()) {
+ node.releaseWriteLatch();
+ incrementWriteLatchesReleased();
+ bufferCache.unpin(node);
+ incrementUnpins();
+ writeLatched = false;
+ continue;
+ }
+ } else {
+ // Be optimistic and grab read latch first. We will swap it
+ // to write latch if we need to enlarge the best child
+ // tuple.
+ node.acquireReadLatch();
+ incrementReadLatchesAcquired();
+ }
+ }
+
+ if (pageId != rootPage && parentLsn < ctx.interiorFrame.getPageNsn()) {
+ // Concurrent split detected, go back to parent and re-choose
+ // the best child
+ if (writeLatched) {
+ node.releaseWriteLatch();
+ incrementWriteLatchesReleased();
+ bufferCache.unpin(node);
+ incrementUnpins();
+ writeLatched = false;
+ } else {
+ node.releaseReadLatch();
+ incrementReadLatchesReleased();
+ bufferCache.unpin(node);
+ incrementUnpins();
+ }
+
+ pageId = ctx.pathList.getLastPageId();
+ if (pageId != rootPage) {
+ parentLsn = ctx.pathList.getPageLsn(ctx.pathList.size() - 2);
+ }
+ ctx.pathList.moveLast();
+ continue;
+ }
+
+ pageLsn = ctx.interiorFrame.getPageLsn();
+ ctx.pathList.add(pageId, pageLsn, -1);
+
+ if (!isLeaf) {
+ // findBestChild must be called *before* getBestChildPageId
+ ctx.interiorFrame.findBestChild(ctx.getTuple(), cmp);
+ int childPageId = ctx.interiorFrame.getBestChildPageId(cmp);
+
+ if (!writeLatched) {
+ node.releaseReadLatch();
+ incrementReadLatchesReleased();
+ // TODO: do we need to un-pin and pin again?
+ bufferCache.unpin(node);
+ incrementUnpins();
+
+ node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+ incrementPins();
+ node.acquireWriteLatch();
+ incrementWriteLatchesAcquired();
+ ctx.interiorFrame.setPage(node);
+ writeLatched = true;
+
+ if (ctx.interiorFrame.getPageLsn() != pageLsn) {
+ // The page was changed while we unlocked it; thus,
+ // retry (re-choose best child)
+
+ ctx.pathList.moveLast();
+ continue;
+ }
+ }
+
+ // We don't need to reset the frameTuple because it is
+ // already pointing to the best child
+ ctx.interiorFrame.enlarge(ctx.getTuple(), cmp);
+
+ node.releaseWriteLatch();
+ incrementWriteLatchesReleased();
+ bufferCache.unpin(node);
+ incrementUnpins();
+ writeLatched = false;
+
+ pageId = childPageId;
+ parentLsn = pageLsn;
+ } else {
+ ctx.leafFrame.setPage(node);
+ return node;
+ }
+ }
+ }
+
+ private void insertTuple(ICachedPage node, int pageId, ITupleReference tuple, RTreeOpContext ctx, boolean isLeaf)
+ throws Exception {
+ FrameOpSpaceStatus spaceStatus;
+ if (!isLeaf) {
+ spaceStatus = ctx.interiorFrame.hasSpaceInsert(tuple, cmp);
+ } else {
+ spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple, cmp);
+ }
+
+ switch (spaceStatus) {
+ case SUFFICIENT_CONTIGUOUS_SPACE: {
+ if (!isLeaf) {
+ ctx.interiorFrame.insert(tuple, cmp, -1);
+ incrementGlobalNsn();
+ ctx.interiorFrame.setPageLsn(getGlobalNsn());
+ } else {
+ ctx.leafFrame.insert(tuple, cmp, -1);
+ incrementGlobalNsn();
+ ctx.leafFrame.setPageLsn(getGlobalNsn());
+ }
+ ctx.splitKey.reset();
+ break;
+ }
+
+ case SUFFICIENT_SPACE: {
+ if (!isLeaf) {
+ ctx.interiorFrame.compact(cmp);
+ ctx.interiorFrame.insert(tuple, cmp, -1);
+ incrementGlobalNsn();
+ ctx.interiorFrame.setPageLsn(getGlobalNsn());
+ } else {
+ ctx.leafFrame.compact(cmp);
+ ctx.leafFrame.insert(tuple, cmp, -1);
+ incrementGlobalNsn();
+ ctx.leafFrame.setPageLsn(getGlobalNsn());
+ }
+ ctx.splitKey.reset();
+ break;
+ }
+
+ case INSUFFICIENT_SPACE: {
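+ // the page cannot accommodate the new tuple: split it and record the
+ // split in ctx.splitKey so the caller can update the parent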
+ int rightPageId = freePageManager.getFreePage(ctx.metaFrame);
+ ICachedPage rightNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rightPageId), true);
+ incrementPins();
+ rightNode.acquireWriteLatch();
+ incrementWriteLatchesAcquired();
+
+ try {
+ IRTreeFrame rightFrame;
+ int ret;
+ numOfPages++; // debug
+ if (!isLeaf) {
+ splitsByLevel[ctx.interiorFrame.getLevel()]++; // debug
+ rightFrame = (IRTreeFrame) interiorFrameFactory.createFrame();
+ rightFrame.setPage(rightNode);
+ rightFrame.initBuffer((byte) ctx.interiorFrame.getLevel());
+ rightFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
+ ret = ctx.interiorFrame.split(rightFrame, tuple, cmp, ctx.splitKey);
+ ctx.interiorFrame.setRightPage(rightPageId);
+ rightFrame.setPageNsn(ctx.interiorFrame.getPageNsn());
+ incrementGlobalNsn();
+ int newNsn = getGlobalNsn();
+ rightFrame.setPageLsn(newNsn);
+ ctx.interiorFrame.setPageNsn(newNsn);
+ ctx.interiorFrame.setPageLsn(newNsn);
+ } else {
+ splitsByLevel[0]++; // debug
+ rightFrame = (IRTreeFrame) leafFrameFactory.createFrame();
+ rightFrame.setPage(rightNode);
+ rightFrame.initBuffer((byte) 0);
+ rightFrame.setPageTupleFieldCount(cmp.getFieldCount());
+ ret = ctx.leafFrame.split(rightFrame, tuple, cmp, ctx.splitKey);
+ ctx.leafFrame.setRightPage(rightPageId);
+ rightFrame.setPageNsn(ctx.leafFrame.getPageNsn());
+ incrementGlobalNsn();
+ int newNsn = getGlobalNsn();
+ rightFrame.setPageLsn(newNsn);
+ ctx.leafFrame.setPageNsn(newNsn);
+ ctx.leafFrame.setPageLsn(newNsn);
+ }
+ if (ret != 0) {
+ ctx.splitKey.reset();
+ } else {
+ ctx.splitKey.setPages(pageId, rightPageId);
+ }
+ if (pageId == rootPage) {
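+ // root split: copy the old root into a fresh page and re-initialize
+ // the root as a new interior node pointing at both halves, so the
+ // root page id never changes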
+ rootSplits++; // debug
+ splitsByLevel[currentLevel]++;
+ currentLevel++;
+
+ int newLeftId = freePageManager.getFreePage(ctx.metaFrame);
+ ICachedPage newLeftNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, newLeftId),
+ true);
+ incrementPins();
+ newLeftNode.acquireWriteLatch();
+ incrementWriteLatchesAcquired();
+ try {
+ // copy left child to new left child
+ System.arraycopy(node.getBuffer().array(), 0, newLeftNode.getBuffer().array(), 0,
+ newLeftNode.getBuffer().capacity());
+
+ // initialize new root (leftNode becomes new root)
+ ctx.interiorFrame.setPage(node);
+ ctx.interiorFrame.initBuffer((byte) (ctx.interiorFrame.getLevel() + 1));
+
+ ctx.splitKey.setLeftPage(newLeftId);
+
+ ctx.interiorFrame.insert(ctx.splitKey.getLeftTuple(), cmp, -1);
+ ctx.interiorFrame.insert(ctx.splitKey.getRightTuple(), cmp, -1);
+
+ incrementGlobalNsn();
+ int newNsn = getGlobalNsn();
+ ctx.interiorFrame.setPageLsn(newNsn);
+ ctx.interiorFrame.setPageNsn(newNsn);
+ } finally {
+ newLeftNode.releaseWriteLatch();
+ incrementWriteLatchesReleased();
+ bufferCache.unpin(newLeftNode);
+ incrementUnpins();
+ }
+
+ ctx.splitKey.reset();
+ }
+ } finally {
+ rightNode.releaseWriteLatch();
+ incrementWriteLatchesReleased();
+ bufferCache.unpin(rightNode);
+ incrementUnpins();
+ }
+ break;
+ }
+ }
+ }
+
+ public void updateParentForInsert(RTreeOpContext ctx) throws Exception {
+ int parentId = ctx.pathList.getLastPageId();
+ ICachedPage parentNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, parentId), false);
+ incrementPins();
+ parentNode.acquireWriteLatch();
+ incrementWriteLatchesAcquired();
+ ctx.interiorFrame.setPage(parentNode);
+ boolean foundParent = true;
+
+ if (ctx.interiorFrame.getPageLsn() != ctx.pathList.getLastPageLsn()) {
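+ // the parent changed since we recorded it (e.g. a concurrent split);
+ // walk right along the sibling chain looking for the entry that
+ // points to the split page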
+ foundParent = false;
+ while (true) {
+ if (ctx.interiorFrame.findTupleByPointer(ctx.splitKey.getLeftTuple(), cmp) != -1) {
+ // found the parent
+ foundParent = true;
+ break;
+ }
+ int rightPage = ctx.interiorFrame.getRightPage();
+ parentNode.releaseWriteLatch();
+ incrementWriteLatchesReleased();
+ bufferCache.unpin(parentNode);
+ incrementUnpins();
+
+ if (rightPage == -1) {
+ break;
+ }
+
+ parentId = rightPage;
+ parentNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, parentId), false);
+ incrementPins();
+ parentNode.acquireWriteLatch();
+ incrementWriteLatchesAcquired();
+ ctx.interiorFrame.setPage(parentNode);
+ }
+ }
+ if (foundParent) {
+ ctx.interiorFrame.adjustKey(ctx.splitKey.getLeftTuple(), -1, cmp);
+ insertTuple(parentNode, parentId, ctx.splitKey.getRightTuple(), ctx, ctx.interiorFrame.isLeaf());
+ ctx.pathList.moveLast();
+
+ parentNode.releaseWriteLatch();
+ incrementWriteLatchesReleased();
+ bufferCache.unpin(parentNode);
+ incrementUnpins();
+ return;
+ }
+
+ // very rare situation: there has been a root split, so do an exhaustive
+ // breadth-first traversal looking for the parent tuple
+
+ ctx.pathList.clear();
+ ctx.traverseList.clear();
+ findPath(ctx);
+ updateParentForInsert(ctx);
+ }
+
+ public void findPath(RTreeOpContext ctx) throws Exception {
+ int pageId = rootPage;
+ int parentIndex = -1;
+ int parentLsn = 0;
+ int pageLsn, pageIndex;
+ ctx.traverseList.add(pageId, -1, parentIndex);
+ while (!ctx.traverseList.isLast()) {
+ pageId = ctx.traverseList.getFirstPageId();
+ parentIndex = ctx.traverseList.getFirstPageIndex();
+
+ ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+ incrementPins();
+ node.acquireReadLatch();
+ incrementReadLatchesAcquired();
+ ctx.interiorFrame.setPage(node);
+ pageLsn = ctx.interiorFrame.getPageLsn();
+ pageIndex = ctx.traverseList.first();
+ ctx.traverseList.setPageLsn(pageIndex, pageLsn);
+
+ ctx.traverseList.moveFirst();
+
+ if (pageId != rootPage && parentLsn < ctx.interiorFrame.getPageNsn()) {
+ int rightPage = ctx.interiorFrame.getRightPage();
+ if (rightPage != -1) {
+ ctx.traverseList.add(rightPage, -1, parentIndex);
+ }
+ }
+ parentLsn = pageLsn;
+
+ if (ctx.interiorFrame.findTupleByPointer(ctx.splitKey.getLeftTuple(), ctx.traverseList, pageIndex, cmp) != -1) {
+ fillPath(ctx, pageIndex);
+
+ node.releaseReadLatch();
+ incrementReadLatchesReleased();
+ bufferCache.unpin(node);
+ incrementUnpins();
+ return;
+ }
+ node.releaseReadLatch();
+ incrementReadLatchesReleased();
+ bufferCache.unpin(node);
+ incrementUnpins();
+ }
+ }
+
+ public void fillPath(RTreeOpContext ctx, int pageIndex) throws Exception {
+ if (pageIndex != -1) {
+ fillPath(ctx, ctx.traverseList.getPageIndex(pageIndex));
+ ctx.pathList.add(ctx.traverseList.getPageId(pageIndex), ctx.traverseList.getPageLsn(pageIndex), -1);
+ }
+ }
+
+ @Override
+ public void delete(ITupleReference tuple, IndexOpContext ictx) throws Exception {
+ RTreeOpContext ctx = (RTreeOpContext) ictx;
+ ctx.reset();
+ ctx.setTuple(tuple);
+ ctx.splitKey.reset();
+ ctx.splitKey.getLeftTuple().setFieldCount(cmp.getKeyFieldCount());
+ ctx.interiorFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
+ ctx.leafFrame.setPageTupleFieldCount(cmp.getFieldCount());
+
+ int tupleIndex = findTupleToDelete(ctx);
+
+ if (tupleIndex != -1) {
+ int pageId = ctx.pathList.getLastPageId();
+ ctx.pathList.moveLast();
+ deleteTuple(pageId, tupleIndex, ctx);
+
+ while (true) {
+ if (ctx.splitKey.getLeftPageBuffer() != null) {
+ updateParentForDelete(ctx);
+ } else {
+ break;
+ }
+ }
+
+ ctx.leafFrame.getPage().releaseWriteLatch();
+ incrementWriteLatchesReleased();
+ bufferCache.unpin(ctx.leafFrame.getPage());
+ incrementUnpins();
+ }
+ }
+
+ public void updateParentForDelete(RTreeOpContext ctx) throws Exception {
+ int parentId = ctx.pathList.getLastPageId();
+ ICachedPage parentNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, parentId), false);
+ incrementPins();
+ parentNode.acquireWriteLatch();
+ incrementWriteLatchesAcquired();
+ ctx.interiorFrame.setPage(parentNode);
+ boolean foundParent = true;
+ int tupleIndex = -1;
+
+ if (ctx.interiorFrame.getPageLsn() != ctx.pathList.getLastPageLsn()) {
+ foundParent = false;
+ while (true) {
+ tupleIndex = ctx.interiorFrame.findTupleByPointer(ctx.splitKey.getLeftTuple(), cmp);
+ if (tupleIndex != -1) {
+ // found the parent
+ foundParent = true;
+ break;
+ }
+ int rightPage = ctx.interiorFrame.getRightPage();
+ parentNode.releaseWriteLatch();
+ incrementWriteLatchesReleased();
+ bufferCache.unpin(parentNode);
+ incrementUnpins();
+
+ if (rightPage == -1) {
+ break;
+ }
+
+ parentId = rightPage;
+ parentNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, parentId), false);
+ incrementPins();
+ parentNode.acquireWriteLatch();
+ incrementWriteLatchesAcquired();
+ ctx.interiorFrame.setPage(parentNode);
+ }
+ }
+ if (foundParent) {
+ if (tupleIndex == -1) {
+ tupleIndex = ctx.interiorFrame.findTupleByPointer(ctx.splitKey.getLeftTuple(), cmp);
+ }
+ boolean recomputeMBR = ctx.interiorFrame.recomputeMBR(ctx.splitKey.getLeftTuple(), tupleIndex, cmp);
+
+ if (recomputeMBR) {
+ ctx.interiorFrame.adjustKey(ctx.splitKey.getLeftTuple(), tupleIndex, cmp);
+ ctx.pathList.moveLast();
+
+ incrementGlobalNsn();
+ ctx.interiorFrame.setPageLsn(getGlobalNsn());
+
+ ctx.splitKey.reset();
+ if (!ctx.pathList.isEmpty()) {
+ ctx.interiorFrame.computeMBR(ctx.splitKey, cmp);
+ ctx.splitKey.setLeftPage(parentId);
+ }
+ } else {
+ ctx.pathList.moveLast();
+ ctx.splitKey.reset();
+ }
+
+ parentNode.releaseWriteLatch();
+ incrementWriteLatchesReleased();
+ bufferCache.unpin(parentNode);
+ incrementUnpins();
+ return;
+ }
+
+ // very rare situation: there has been a root split, so do an exhaustive
+ // breadth-first traversal looking for the parent tuple
+
+ ctx.pathList.clear();
+ ctx.traverseList.clear();
+ findPath(ctx);
+ updateParentForDelete(ctx);
+ }
+
+ public int findTupleToDelete(RTreeOpContext ctx) throws Exception {
+
+ ctx.traverseList.add(rootPage, -1, -1);
+ ctx.pathList.add(rootPage, -1, ctx.traverseList.size() - 1);
+
+ while (!ctx.pathList.isEmpty()) {
+ int pageId = ctx.pathList.getLastPageId();
+ int parentLsn = ctx.pathList.getLastPageLsn();
+ int pageIndex = ctx.pathList.getLastPageIndex();
+ ctx.pathList.moveLast();
+ ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+ incrementPins();
+ node.acquireReadLatch();
+ incrementReadLatchesAcquired();
+ ctx.interiorFrame.setPage(node);
+ boolean isLeaf = ctx.interiorFrame.isLeaf();
+ int pageLsn = ctx.interiorFrame.getPageLsn();
+ int parentIndex = ctx.traverseList.getPageIndex(pageIndex);
+ ctx.traverseList.setPageLsn(pageIndex, pageLsn);
+
+ if (pageId != rootPage && parentLsn < ctx.interiorFrame.getPageNsn()) {
+ // Concurrent split detected, we need to visit the right page
+ int rightPage = ctx.interiorFrame.getRightPage();
+ if (rightPage != -1) {
+ ctx.traverseList.add(rightPage, -1, parentIndex);
+ ctx.pathList.add(rightPage, parentLsn, ctx.traverseList.size() - 1);
+ }
+ }
+
+ if (!isLeaf) {
+ for (int i = 0; i < ctx.interiorFrame.getTupleCount(); i++) {
+ int childPageId = ctx.interiorFrame.getChildPageIdIfIntersect(ctx.tuple, i, cmp);
+ if (childPageId != -1) {
+ ctx.traverseList.add(childPageId, -1, pageIndex);
+ ctx.pathList.add(childPageId, pageLsn, ctx.traverseList.size() - 1);
+ }
+ }
+ } else {
+ ctx.leafFrame.setPage(node);
+ int tupleIndex = ctx.leafFrame.findTupleIndex(ctx.tuple, cmp);
+ if (tupleIndex != -1) {
+
+ node.releaseReadLatch();
+ incrementReadLatchesReleased();
+ bufferCache.unpin(node);
+ incrementUnpins();
+
+ node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+ incrementPins();
+ node.acquireWriteLatch();
+ incrementWriteLatchesAcquired();
+ ctx.leafFrame.setPage(node);
+
+ if (ctx.leafFrame.getPageLsn() != pageLsn) {
+ // The page was changed while we unlocked it
+
+ tupleIndex = ctx.leafFrame.findTupleIndex(ctx.tuple, cmp);
+ if (tupleIndex == -1) {
+ ctx.traverseList.add(pageId, -1, parentIndex);
+ ctx.pathList.add(pageId, parentLsn, ctx.traverseList.size() - 1);
+ } else {
+ ctx.pathList.clear();
+ fillPath(ctx, pageIndex);
+ return tupleIndex;
+ }
+ } else {
+ ctx.pathList.clear();
+ fillPath(ctx, pageIndex);
+ return tupleIndex;
+ }
+ }
+ }
+ node.releaseReadLatch();
+ incrementReadLatchesReleased();
+ bufferCache.unpin(node);
+ incrementUnpins();
+ }
+ return -1;
+ }
+
+ public void deleteTuple(int pageId, int tupleIndex, RTreeOpContext ctx) throws Exception {
+ ctx.leafFrame.delete(tupleIndex, cmp);
+ incrementGlobalNsn();
+ ctx.leafFrame.setPageLsn(getGlobalNsn());
+
+ // if the page is empty, just leave it there for future inserts
+ if (pageId != rootPage && ctx.leafFrame.getTupleCount() > 0) {
+ ctx.leafFrame.computeMBR(ctx.splitKey, cmp);
+ ctx.splitKey.setLeftPage(pageId);
+ }
+ }
+
+ public void search(ITreeIndexCursor cursor, SearchPredicate pred, RTreeOpContext ctx) throws Exception {
+ ctx.reset();
+ ctx.cursor = cursor;
+
+ cursor.setBufferCache(bufferCache);
+ cursor.setFileId(fileId);
+ ctx.cursorInitialState.setRootPage(rootPage);
+ ctx.cursor.open(ctx.cursorInitialState, pred);
+ }
+
+ public ITreeIndexFrameFactory getInteriorFrameFactory() {
+ return interiorFrameFactory;
+ }
+
+ public ITreeIndexFrameFactory getLeafFrameFactory() {
+ return leafFrameFactory;
+ }
+
+ public MultiComparator getCmp() {
+ return cmp;
+ }
+
+ public IFreePageManager getFreePageManager() {
+ return freePageManager;
+ }
+
+ @Override
+ public void update(ITupleReference tuple, IndexOpContext ictx) throws Exception {
+ throw new Exception("RTree Update not implemented.");
+ }
+
+ public final class BulkLoadContext implements IIndexBulkLoadContext {
+
+ public RTreeOpContext insertOpCtx;
+
+ public BulkLoadContext(float fillFactor, IRTreeFrame leafFrame, IRTreeFrame interiorFrame,
+ ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
+
+ insertOpCtx = createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
+ }
+ }
+
+ @Override
+ public IIndexBulkLoadContext beginBulkLoad(float fillFactor, ITreeIndexFrame leafFrame,
+ ITreeIndexFrame interiorFrame, ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
+ // throw new HyracksDataException("RTree Bulkload not implemented.");
+
+ if (loaded)
+ throw new HyracksDataException("Trying to bulk-load RTree, but the RTree has already been loaded.");
+
+ BulkLoadContext ctx = new BulkLoadContext(fillFactor, (IRTreeFrame) leafFrame, (IRTreeFrame) interiorFrame,
+ metaFrame);
+ return ctx;
+ }
+
+ @Override
+ public void bulkLoadAddTuple(IIndexBulkLoadContext ictx, ITupleReference tuple) throws HyracksDataException {
+ // throw new HyracksDataException("RTree Bulkload not implemented.");
+
+ try {
+ insert(tuple, ((BulkLoadContext) ictx).insertOpCtx);
+ } catch (Exception e) {
+ // insert failures are currently logged and swallowed rather than rethrown
+ e.printStackTrace();
+ }
+ }
+
+ @Override
+ public void endBulkLoad(IIndexBulkLoadContext ictx) throws HyracksDataException {
+ // throw new HyracksDataException("RTree Bulkload not implemented.");
+
+ loaded = true;
+ }
+
+ @Override
+ public void diskOrderScan(ITreeIndexCursor icursor, ITreeIndexFrame leafFrame, ITreeIndexMetaDataFrame metaFrame,
+ IndexOpContext ictx) throws HyracksDataException {
+ TreeDiskOrderScanCursor cursor = (TreeDiskOrderScanCursor) icursor;
+ RTreeOpContext ctx = (RTreeOpContext) ictx;
+ ctx.reset();
+
+ int currentPageId = rootPage + 1;
+ int maxPageId = freePageManager.getMaxPage(metaFrame);
+
+ ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, currentPageId), false);
+ page.acquireReadLatch();
+ cursor.setBufferCache(bufferCache);
+ cursor.setFileId(fileId);
+ cursor.setCurrentPageId(currentPageId);
+ cursor.setMaxPageId(maxPageId);
+ ctx.cursorInitialState.setPage(page);
+ cursor.open(ctx.cursorInitialState, diskOrderScanPredicate);
+ }
+
+ @Override
+ public int getRootPageId() {
+ return rootPage;
+ }
+
+ @Override
+ public int getFieldCount() {
+ return cmp.getFieldCount();
+ }
+
+ @Override
+ public IndexType getIndexType() {
+ return IndexType.RTREE;
+ }
+}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeCursorInitialState.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeCursorInitialState.java
new file mode 100644
index 0000000..8b94a04
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeCursorInitialState.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.impls;
+
+import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+
+public class RTreeCursorInitialState implements ICursorInitialState {
+
+ private PathList pathList;
+ private int rootPage;
+ private ICachedPage page; // for disk order scan
+
+ public RTreeCursorInitialState(PathList pathList, int rootPage) {
+ this.pathList = pathList;
+ this.rootPage = rootPage;
+ }
+
+ public PathList getPathList() {
+ return pathList;
+ }
+
+ public int getRootPage() {
+ return rootPage;
+ }
+
+ public void setRootPage(int rootPage) {
+ this.rootPage = rootPage;
+ }
+
+ public ICachedPage getPage() {
+ return page;
+ }
+
+ public void setPage(ICachedPage page) {
+ this.page = page;
+ }
+}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeOpContext.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeOpContext.java
new file mode 100644
index 0000000..ea8af28
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeOpContext.java
@@ -0,0 +1,74 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.impls;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOpContext;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
+
+public final class RTreeOpContext implements IndexOpContext {
+ public final IndexOp op;
+ public final IRTreeInteriorFrame interiorFrame;
+ public final IRTreeLeafFrame leafFrame;
+ public ITreeIndexCursor cursor;
+ public RTreeCursorInitialState cursorInitialState;
+ public final ITreeIndexMetaDataFrame metaFrame;
+ public final RTreeSplitKey splitKey;
+ public ITupleReference tuple;
+ public final PathList pathList; // used to record the pageIds and pageLsns
+ // of the visited pages
+ public final PathList traverseList; // used for traversing the tree
+ private static final int initTraverseListSize = 100;
+
+ public RTreeOpContext(IndexOp op, IRTreeLeafFrame leafFrame, IRTreeInteriorFrame interiorFrame,
+ ITreeIndexMetaDataFrame metaFrame, int treeHeightHint) {
+ this.op = op;
+ this.interiorFrame = interiorFrame;
+ this.leafFrame = leafFrame;
+ this.metaFrame = metaFrame;
+ pathList = new PathList(treeHeightHint, treeHeightHint);
+ if (op != IndexOp.SEARCH && op != IndexOp.DISKORDERSCAN) {
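+ // modifying operations need a split key and a traverse list; searches
+ // and disk-order scans only need the cursor's initial state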
+ splitKey = new RTreeSplitKey(interiorFrame.getTupleWriter().createTupleReference(), interiorFrame
+ .getTupleWriter().createTupleReference());
+ traverseList = new PathList(initTraverseListSize, initTraverseListSize);
+ } else {
+ splitKey = null;
+ traverseList = null;
+ cursorInitialState = new RTreeCursorInitialState(pathList, 1);
+ }
+ }
+
+ public ITupleReference getTuple() {
+ return tuple;
+ }
+
+ public void setTuple(ITupleReference tuple) {
+ this.tuple = tuple;
+ }
+
+ public void reset() {
+ if (pathList != null) {
+ pathList.clear();
+ }
+ if (traverseList != null) {
+ traverseList.clear();
+ }
+ }
+}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSearchCursor.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSearchCursor.java
new file mode 100644
index 0000000..4edcaf8
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSearchCursor.java
@@ -0,0 +1,195 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.impls;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
+import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
+
+public class RTreeSearchCursor implements ITreeIndexCursor {
+
+ private int fileId = -1;
+ private ICachedPage page = null;
+ private IRTreeInteriorFrame interiorFrame = null;
+ private IRTreeLeafFrame leafFrame = null;
+ private IBufferCache bufferCache = null;
+
+ private SearchPredicate pred;
+ private PathList pathList;
+ private int rootPage;
+ ITupleReference searchKey;
+
+ private int tupleIndex = 0;
+ private int tupleIndexInc = 0;
+
+ private MultiComparator cmp;
+
+ private ITreeIndexTupleReference frameTuple;
+
+ private int pin = 0;
+ private int unpin = 0;
+
+ public RTreeSearchCursor(IRTreeInteriorFrame interiorFrame, IRTreeLeafFrame leafFrame) {
+ this.interiorFrame = interiorFrame;
+ this.leafFrame = leafFrame;
+ this.frameTuple = leafFrame.createTupleReference();
+ }
+
+ @Override
+ public void close() throws Exception {
+ if (page != null) {
+ page.releaseReadLatch();
+ bufferCache.unpin(page);
+ }
+ tupleIndex = 0;
+ tupleIndexInc = 0;
+ page = null;
+ pathList = null;
+ }
+
+ public ITupleReference getTuple() {
+ return frameTuple;
+ }
+
+ @Override
+ public ICachedPage getPage() {
+ return page;
+ }
+
+ public boolean fetchNextLeafPage() throws HyracksDataException {
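+ // treats pathList as a stack of pages still to visit: interior entries
+ // that intersect the search key are pushed, and the first leaf found
+ // becomes the current page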
+ while (!pathList.isEmpty()) {
+ int pageId = pathList.getLastPageId();
+ int parentLsn = pathList.getLastPageLsn();
+ pathList.moveLast();
+ ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+ pin++;
+ node.acquireReadLatch();
+ interiorFrame.setPage(node);
+ boolean isLeaf = interiorFrame.isLeaf();
+ int pageLsn = interiorFrame.getPageLsn();
+
+ if (pageId != rootPage && parentLsn < interiorFrame.getPageNsn()) {
+ // Concurrent split detected, we need to visit the right page
+ int rightPage = interiorFrame.getRightPage();
+ if (rightPage != -1) {
+ pathList.add(rightPage, parentLsn, -1);
+ }
+ }
+
+ if (!isLeaf) {
+ for (int i = 0; i < interiorFrame.getTupleCount(); i++) {
+ int childPageId = interiorFrame.getChildPageIdIfIntersect(searchKey, i, cmp);
+ if (childPageId != -1) {
+ pathList.add(childPageId, pageLsn, -1);
+ }
+ }
+ } else {
+ if (page != null) {
+ page.releaseReadLatch();
+ bufferCache.unpin(page);
+ unpin++;
+ }
+ page = node;
+ leafFrame.setPage(page);
+ tupleIndex = 0;
+ return true;
+ }
+ node.releaseReadLatch();
+ bufferCache.unpin(node);
+ unpin++;
+ }
+ return false;
+ }
+
+ @Override
+ public boolean hasNext() throws Exception {
+ if (page == null) {
+ return false;
+ }
+
+ if (tupleIndex == leafFrame.getTupleCount()) {
+ if (!fetchNextLeafPage()) {
+ return false;
+ }
+ }
+
+ do {
+ for (int i = tupleIndex; i < leafFrame.getTupleCount(); i++) {
+ if (leafFrame.intersect(searchKey, i, cmp)) {
+ frameTuple.resetByTupleIndex(leafFrame, i);
+ tupleIndexInc = i + 1;
+ return true;
+ }
+ }
+ } while (fetchNextLeafPage());
+ return false;
+ }
+
+ @Override
+ public void next() throws Exception {
+ tupleIndex = tupleIndexInc;
+ }
+
+ @Override
+ public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws Exception {
+ // in case open is called multiple times without closing
+ if (this.page != null) {
+ this.page.releaseReadLatch();
+ bufferCache.unpin(this.page);
+ pathList.clear();
+ }
+
+ pathList = ((RTreeCursorInitialState) initialState).getPathList();
+ rootPage = ((RTreeCursorInitialState) initialState).getRootPage();
+
+ pred = (SearchPredicate) searchPred;
+ cmp = pred.getLowKeyComparator();
+ searchKey = pred.getSearchKey();
+
+ pathList.add(this.rootPage, -1, -1);
+ frameTuple.setFieldCount(cmp.getFieldCount());
+ tupleIndex = 0;
+ fetchNextLeafPage();
+ }
+
+ @Override
+ public void reset() {
+ try {
+ close();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ @Override
+ public void setBufferCache(IBufferCache bufferCache) {
+ this.bufferCache = bufferCache;
+ }
+
+ @Override
+ public void setFileId(int fileId) {
+ this.fileId = fileId;
+ }
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSplitKey.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSplitKey.java
new file mode 100644
index 0000000..f220ea3
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSplitKey.java
@@ -0,0 +1,152 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.impls;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+
+public class RTreeSplitKey implements ISplitKey {
+ public byte[] leftPageData = null;
+ public ByteBuffer leftPageBuf = null;
+ public ITreeIndexTupleReference leftTuple;
+
+ public byte[] rightPageData = null;
+ public ByteBuffer rightPageBuf = null;
+ public ITreeIndexTupleReference rightTuple;
+
+ public int keySize = 0;
+
+ public RTreeSplitKey(ITreeIndexTupleReference leftTuple, ITreeIndexTupleReference rightTuple) {
+ this.leftTuple = leftTuple;
+ this.rightTuple = rightTuple;
+ }
+
+ public void initData(int keySize) {
+ // try to reuse existing memory from a lower-level split if possible
+ this.keySize = keySize;
+ if (leftPageData != null) {
+ if (leftPageData.length < keySize + 4) {
+ leftPageData = new byte[keySize + 4]; // add 4 for the page
+ leftPageBuf = ByteBuffer.wrap(leftPageData);
+ }
+ } else {
+ leftPageData = new byte[keySize + 4]; // add 4 for the page
+ leftPageBuf = ByteBuffer.wrap(leftPageData);
+ }
+ if (rightPageData != null) {
+ if (rightPageData.length < keySize + 4) {
+ rightPageData = new byte[keySize + 4]; // add 4 for the page
+ rightPageBuf = ByteBuffer.wrap(rightPageData);
+ }
+ } else {
+ rightPageData = new byte[keySize + 4]; // add 4 for the page
+ rightPageBuf = ByteBuffer.wrap(rightPageData);
+ }
+
+ leftTuple.resetByTupleOffset(leftPageBuf, 0);
+ rightTuple.resetByTupleOffset(rightPageBuf, 0);
+ }
+
+ public void resetLeftPage() {
+ leftPageData = null;
+ leftPageBuf = null;
+ }
+
+ public void resetRightPage() {
+ rightPageData = null;
+ rightPageBuf = null;
+ }
+
+ public ByteBuffer getLeftPageBuffer() {
+ return leftPageBuf;
+ }
+
+ public ByteBuffer getRightPageBuffer() {
+ return rightPageBuf;
+ }
+
+ public ITreeIndexTupleReference getLeftTuple() {
+ return leftTuple;
+ }
+
+ public ITreeIndexTupleReference getRightTuple() {
+ return rightTuple;
+ }
+
+ public int getLeftPage() {
+ return leftPageBuf.getInt(keySize);
+ }
+
+ public int getRightPage() {
+ return rightPageBuf.getInt(keySize);
+ }
+
+ public void setLeftPage(int page) {
+ leftPageBuf.putInt(keySize, page);
+ }
+
+ public void setRightPage(int page) {
+ rightPageBuf.putInt(keySize, page);
+ }
+
+ public ISplitKey duplicate(ITreeIndexTupleReference copyLeftTuple, ITreeIndexTupleReference copyRightTuple) {
+ RTreeSplitKey copy = new RTreeSplitKey(copyLeftTuple, copyRightTuple);
+ copy.leftPageData = leftPageData.clone();
+ copy.leftPageBuf = ByteBuffer.wrap(copy.leftPageData);
+ copy.leftTuple.setFieldCount(leftTuple.getFieldCount());
+ copy.leftTuple.resetByTupleOffset(copy.leftPageBuf, 0);
+
+ copy.rightPageData = rightPageData.clone();
+ copy.rightPageBuf = ByteBuffer.wrap(copy.rightPageData);
+ copy.rightTuple.setFieldCount(rightTuple.getFieldCount());
+ copy.rightTuple.resetByTupleOffset(copy.rightPageBuf, 0);
+ return copy;
+ }
+
+ @Override
+ public void reset() {
+ leftPageData = null;
+ leftPageBuf = null;
+ rightPageData = null;
+ rightPageBuf = null;
+ }
+
+ @Override
+ public ByteBuffer getBuffer() {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public ITreeIndexTupleReference getTuple() {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ @Override
+ public void setPages(int leftPage, int rightPage) {
+ leftPageBuf.putInt(keySize, leftPage);
+ rightPageBuf.putInt(keySize, rightPage);
+ }
+
+ @Override
+ public ISplitKey duplicate(ITreeIndexTupleReference copyTuple) {
+ // TODO Auto-generated method stub
+ return null;
+ }
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/Rectangle.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/Rectangle.java
new file mode 100644
index 0000000..1fb86a9
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/Rectangle.java
@@ -0,0 +1,136 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.impls;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+
+public class Rectangle {
+ private int dim;
+ private double[] low;
+ private double[] high;
+
+ public Rectangle(int dim) {
+ this.dim = dim;
+ low = new double[this.dim];
+ high = new double[this.dim];
+ }
+
+ public int getDim() {
+ return dim;
+ }
+
+ public double getLow(int i) {
+ return low[i];
+ }
+
+ public double getHigh(int i) {
+ return high[i];
+ }
+
+ public void setLow(int i, double value) {
+ low[i] = value;
+ }
+
+ public void setHigh(int i, double value) {
+ high[i] = value;
+ }
+
+ public void set(ITupleReference tuple) {
+ for (int i = 0; i < getDim(); i++) {
+ int j = i + getDim();
+ setLow(i, DoubleSerializerDeserializer.getDouble(tuple.getFieldData(i), tuple.getFieldStart(i)));
+ setHigh(i, DoubleSerializerDeserializer.getDouble(tuple.getFieldData(j), tuple.getFieldStart(j)));
+ }
+ }
+
+ public void enlarge(ITupleReference tupleToBeInserted) {
+ for (int i = 0; i < getDim(); i++) {
+ int j = getDim() + i;
+ double low = DoubleSerializerDeserializer.getDouble(tupleToBeInserted.getFieldData(i),
+ tupleToBeInserted.getFieldStart(i));
+ if (getLow(i) > low) {
+ setLow(i, low);
+ }
+ double high = DoubleSerializerDeserializer.getDouble(tupleToBeInserted.getFieldData(j),
+ tupleToBeInserted.getFieldStart(j));
+ if (getHigh(i) < high) {
+ setHigh(i, high);
+ }
+ }
+ }
+
+ public double margin() {
+ double margin = 0.0;
+ double mul = Math.pow(2, (double) getDim() - 1.0);
+ for (int i = 0; i < getDim(); i++) {
+ margin += (getHigh(i) - getLow(i)) * mul;
+ }
+ return margin;
+ }
+
+ public double overlappedArea(ITupleReference tuple) {
+ double area = 1.0;
+ double f1, f2;
+
+ for (int i = 0; i < getDim(); i++) {
+ int j = getDim() + i;
+ double low = DoubleSerializerDeserializer.getDouble(tuple.getFieldData(i), tuple.getFieldStart(i));
+ double high = DoubleSerializerDeserializer.getDouble(tuple.getFieldData(j), tuple.getFieldStart(j));
+ if (getLow(i) > high || getHigh(i) < low) {
+ return 0.0;
+ }
+ f1 = Math.max(getLow(i), low);
+ f2 = Math.min(getHigh(i), high);
+ area *= f2 - f1;
+ }
+ return area;
+ }
+
+ public double overlappedArea(Rectangle rec) {
+ double area = 1.0;
+ double f1, f2;
+
+ for (int i = 0; i < getDim(); i++) {
+ if (getLow(i) > rec.getHigh(i) || getHigh(i) < rec.getLow(i)) {
+ return 0.0;
+ }
+
+ f1 = Math.max(getLow(i), rec.getLow(i));
+ f2 = Math.min(getHigh(i), rec.getHigh(i));
+ area *= f2 - f1;
+ }
+ return area;
+ }
+
+ public double area(ITupleReference tuple) {
+ double area = 1.0;
+ for (int i = 0; i < getDim(); i++) {
+ int j = getDim() + i;
+ area *= DoubleSerializerDeserializer.getDouble(tuple.getFieldData(j), tuple.getFieldStart(j))
+ - DoubleSerializerDeserializer.getDouble(tuple.getFieldData(i), tuple.getFieldStart(i));
+ }
+ return area;
+ }
+
+ public double area() {
+ double area = 1.0;
+ for (int i = 0; i < getDim(); i++) {
+ area *= getHigh(i) - getLow(i);
+ }
+ return area;
+ }
+}
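Rectangle keeps an R-tree minimum bounding rectangle as per-dimension low/high coordinates; overlappedArea() returns 0.0 as soon as the boxes are disjoint in any dimension and otherwise multiplies the per-dimension overlap extents. A standalone sketch of that arithmetic on plain doubles, not part of this patch (the coordinates are made up):

public class RectangleOverlapSketch {
    public static void main(String[] args) {
        // two 2-D boxes: [0,2]x[0,2] and [1,3]x[1,3]
        double[] aLow = { 0, 0 }, aHigh = { 2, 2 };
        double[] bLow = { 1, 1 }, bHigh = { 3, 3 };
        double area = 1.0;
        for (int i = 0; i < 2; i++) {
            if (aLow[i] > bHigh[i] || aHigh[i] < bLow[i]) {
                area = 0.0; // disjoint in this dimension, so there is no overlap at all
                break;
            }
            area *= Math.min(aHigh[i], bHigh[i]) - Math.max(aLow[i], bLow[i]);
        }
        System.out.println(area); // 1.0: the unit square shared by the two boxes
    }
}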
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/SearchPredicate.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/SearchPredicate.java
new file mode 100644
index 0000000..cd3a0ef
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/SearchPredicate.java
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.impls;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+
+public class SearchPredicate implements ISearchPredicate {
+
+ private static final long serialVersionUID = 1L;
+
+ protected ITupleReference searchKey;
+ protected MultiComparator cmp;
+
+ public SearchPredicate(ITupleReference searchKey, MultiComparator cmp) {
+ this.searchKey = searchKey;
+ this.cmp = cmp;
+ }
+
+ public ITupleReference getSearchKey() {
+ return searchKey;
+ }
+
+ public void setSearchKey(ITupleReference searchKey) {
+ this.searchKey = searchKey;
+ }
+
+ public MultiComparator getLowKeyComparator() {
+ return cmp;
+ }
+
+ public MultiComparator getHighKeyComparator() {
+ return cmp;
+ }
+}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TupleEntry.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TupleEntry.java
new file mode 100644
index 0000000..0cb3de8
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TupleEntry.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.impls;
+
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMFrame;
+
+public class TupleEntry implements Comparable<TupleEntry> {
+ private int tupleIndex;
+ private double value;
+
+ public TupleEntry() {
+ }
+
+ public int getTupleIndex() {
+ return tupleIndex;
+ }
+
+ public void setTupleIndex(int tupleIndex) {
+ this.tupleIndex = tupleIndex;
+ }
+
+ public double getValue() {
+ return value;
+ }
+
+ public void setValue(double value) {
+ this.value = value;
+ }
+
+ public int compareTo(TupleEntry tupleEntry) {
+ double cmp = this.getValue() - tupleEntry.getValue();
+ if (cmp > RTreeNSMFrame.doubleEpsilon())
+ return 1;
+ cmp = tupleEntry.getValue() - this.getValue();
+ if (cmp > RTreeNSMFrame.doubleEpsilon())
+ return -1;
+ return 0;
+ }
+}
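TupleEntry orders entries by value but treats differences smaller than RTreeNSMFrame.doubleEpsilon() as equal, so tiny floating-point noise does not flip the ordering. A standalone sketch of that comparison, not part of this patch (the epsilon constant is a placeholder for RTreeNSMFrame.doubleEpsilon()):

public class EpsilonCompareSketch {
    static final double EPSILON = 1e-9; // placeholder for RTreeNSMFrame.doubleEpsilon()

    static int compare(double a, double b) {
        if (a - b > EPSILON) return 1;
        if (b - a > EPSILON) return -1;
        return 0; // differences within EPSILON count as equal
    }

    public static void main(String[] args) {
        System.out.println(compare(1.0, 1.0 + 1e-12)); // 0
        System.out.println(compare(2.0, 1.0));         // 1
    }
}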
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TupleEntryArrayList.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TupleEntryArrayList.java
new file mode 100644
index 0000000..8be8251
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TupleEntryArrayList.java
@@ -0,0 +1,79 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.impls;
+
+import java.util.Arrays;
+import java.util.Collections;
+
+public class TupleEntryArrayList {
+ private TupleEntry[] data;
+ private int size;
+ private final int growth;
+
+ public TupleEntryArrayList(int initialCapacity, int growth) {
+ data = new TupleEntry[initialCapacity];
+ size = 0;
+ this.growth = growth;
+ }
+
+ public int size() {
+ return size;
+ }
+
+ public void add(int tupleIndex, double value) {
+ if (size == data.length) {
+ TupleEntry[] newData = new TupleEntry[data.length + growth];
+ System.arraycopy(data, 0, newData, 0, data.length);
+ data = newData;
+ }
+ if (data[size] == null) {
+ data[size] = new TupleEntry();
+ }
+ data[size].setTupleIndex(tupleIndex);
+ data[size].setValue(value);
+ size++;
+ }
+
+ public void removeLast() {
+ if (size > 0)
+ size--;
+ }
+
+ // WARNING: caller is responsible for checking size > 0
+ public TupleEntry getLast() {
+ return data[size - 1];
+ }
+
+ public TupleEntry get(int i) {
+ return data[i];
+ }
+
+ public void clear() {
+ size = 0;
+ }
+
+ public boolean isEmpty() {
+ return size == 0;
+ }
+
+ public void sort(EntriesOrder order, int tupleCount) {
+ if (order == EntriesOrder.ASCENDING) {
+ Arrays.sort(data, 0, tupleCount);
+ } else {
+ Arrays.sort(data, 0, tupleCount, Collections.reverseOrder());
+ }
+ }
+}
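TupleEntryArrayList grows its backing array by a fixed increment rather than doubling, reuses TupleEntry objects after clear(), and sorts only the live prefix of the array. A standalone sketch of the same growth-and-sort pattern, not part of this patch (primitive doubles stand in for TupleEntry):

import java.util.Arrays;

public class GrowableListSketch {
    private double[] data = new double[4]; // initial capacity
    private int size;
    private final int growth = 4;          // fixed growth increment, as in TupleEntryArrayList

    void add(double value) {
        if (size == data.length) {
            data = Arrays.copyOf(data, data.length + growth); // grow by a constant, not by doubling
        }
        data[size++] = value;
    }

    void clear() {
        size = 0; // slots stay allocated and are reused by later add() calls
    }

    public static void main(String[] args) {
        GrowableListSketch list = new GrowableListSketch();
        for (int i = 10; i > 0; i--) {
            list.add(i * 0.5);
        }
        Arrays.sort(list.data, 0, list.size); // sort only the live prefix, like sort(...) above
        System.out.println(list.data[0]);     // 0.5
    }
}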
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/UnorderedSlotManager.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/UnorderedSlotManager.java
new file mode 100644
index 0000000..d2b1c53
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/UnorderedSlotManager.java
@@ -0,0 +1,114 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.impls;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.frames.AbstractSlotManager;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMFrame;
+
+public class UnorderedSlotManager extends AbstractSlotManager {
+ @Override
+ public int findTupleIndex(ITupleReference searchKey, ITreeIndexTupleReference frameTuple, MultiComparator multiCmp,
+ FindTupleMode mode, FindTupleNoExactMatchPolicy matchPolicy) {
+
+ int maxFieldPos = multiCmp.getKeyFieldCount() / 2;
+ for (int i = 0; i < frame.getTupleCount(); i++) {
+ frameTuple.resetByTupleIndex(frame, i);
+
+ boolean foundTuple = true;
+ for (int j = 0; j < maxFieldPos; j++) {
+ int k = maxFieldPos + j;
+ int c1 = multiCmp.getComparators()[j].compare(frameTuple.getFieldData(j), frameTuple.getFieldStart(j),
+ frameTuple.getFieldLength(j), searchKey.getFieldData(j), searchKey.getFieldStart(j),
+ searchKey.getFieldLength(j));
+
+ if (c1 != 0) {
+ foundTuple = false;
+ break;
+ }
+ int c2 = multiCmp.getComparators()[k].compare(frameTuple.getFieldData(k), frameTuple.getFieldStart(k),
+ frameTuple.getFieldLength(k), searchKey.getFieldData(k), searchKey.getFieldStart(k),
+ searchKey.getFieldLength(k));
+ if (c2 != 0) {
+ foundTuple = false;
+ break;
+ }
+ }
+ int remainingFieldCount = multiCmp.getFieldCount() - multiCmp.getKeyFieldCount();
+ for (int j = multiCmp.getKeyFieldCount(); j < multiCmp.getKeyFieldCount() + remainingFieldCount; j++) {
+ if (!compareField(searchKey, frameTuple, j)) {
+ foundTuple = false;
+ break;
+ }
+ }
+ if (foundTuple) {
+ return i;
+ }
+ }
+ return -1;
+ }
+
+ public boolean compareField(ITupleReference searchKey, ITreeIndexTupleReference frameTuple, int fIdx) {
+ int searchKeyFieldLength = searchKey.getFieldLength(fIdx);
+ int frameTupleFieldLength = frameTuple.getFieldLength(fIdx);
+
+ if (searchKeyFieldLength != frameTupleFieldLength) {
+ return false;
+ }
+
+ for (int i = 0; i < searchKeyFieldLength; i++) {
+ if (searchKey.getFieldData(fIdx)[i + searchKey.getFieldStart(fIdx)] != frameTuple.getFieldData(fIdx)[i
+ + frameTuple.getFieldStart(fIdx)]) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ @Override
+ public int insertSlot(int tupleIndex, int tupleOff) {
+ int slotOff = getSlotEndOff() - slotSize;
+ setSlot(slotOff, tupleOff);
+ return slotOff;
+ }
+
+ public void modifySlot(int slotOff, int tupleOff) {
+ setSlot(slotOff, tupleOff);
+ }
+
+ public void deleteEmptySlots() {
+ int slotOff = getSlotStartOff();
+ while (slotOff >= getSlotEndOff()) {
+ if (frame.getBuffer().getInt(slotOff) == -1) {
+ while (frame.getBuffer().getInt(getSlotEndOff()) == -1) {
+ ((RTreeNSMFrame) frame).setTupleCount(frame.getTupleCount() - 1);
+ }
+ if (slotOff > getSlotEndOff()) {
+ System.arraycopy(frame.getBuffer().array(), getSlotEndOff(), frame.getBuffer().array(), slotOff,
+ slotSize);
+ ((RTreeNSMFrame) frame).setTupleCount(frame.getTupleCount() - 1);
+ } else {
+ break;
+ }
+ }
+ slotOff -= slotSize;
+ }
+ }
+}
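UnorderedSlotManager scans the frame linearly because R-tree frames keep tuples unordered; a tuple matches only when every key field compares equal under the MultiComparator and every remaining field is byte-for-byte identical (compareField). A standalone sketch of that byte-wise field equality, not part of this patch:

public class ByteFieldEqualsSketch {
    // Mirrors compareField(): fields are equal only if lengths match and every byte matches.
    static boolean fieldEquals(byte[] a, int aStart, int aLen, byte[] b, int bStart, int bLen) {
        if (aLen != bLen) {
            return false;
        }
        for (int i = 0; i < aLen; i++) {
            if (a[aStart + i] != b[bStart + i]) {
                return false;
            }
        }
        return true;
    }

    public static void main(String[] args) {
        byte[] x = { 1, 2, 3, 4 };
        byte[] y = { 0, 2, 3, 4, 9 };
        System.out.println(fieldEquals(x, 1, 3, y, 1, 3)); // true: both slices are {2, 3, 4}
    }
}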
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriter.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriter.java
new file mode 100644
index 0000000..96820c9
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriter.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.tuples;
+
+import java.nio.ByteBuffer;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriter;
+
+public class RTreeTypeAwareTupleWriter extends TypeAwareTupleWriter {
+
+ public RTreeTypeAwareTupleWriter(ITypeTrait[] typeTraits) {
+ super(typeTraits);
+ }
+
+ public int writeTupleFields(ITreeIndexTupleReference[] refs, int startField, ByteBuffer targetBuf, int targetOff) {
+ int runner = targetOff;
+ int nullFlagsBytes = getNullFlagsBytes(refs.length);
+ // write null indicator bits
+ for (int i = 0; i < nullFlagsBytes; i++) {
+ targetBuf.put(runner++, (byte) 0);
+ }
+
+ // write field slots for variable length fields
+ // the R-tree uses fixed-length keys, so slots are only written for any variable-length fields
+ encDec.reset(targetBuf.array(), runner);
+ for (int i = startField; i < startField + refs.length; i++) {
+ if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
+ encDec.encode(refs[i].getFieldLength(i));
+ }
+ }
+ runner = encDec.getPos();
+
+ // write data
+ for (int i = 0; i < refs.length; i++) {
+ System.arraycopy(refs[i].getFieldData(i), refs[i].getFieldStart(i), targetBuf.array(), runner,
+ refs[i].getFieldLength(i));
+ runner += refs[i].getFieldLength(i);
+ }
+ return runner - targetOff;
+
+ }
+}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/TypeAwareTupleWriterFactory.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriterFactory.java
similarity index 64%
copy from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/TypeAwareTupleWriterFactory.java
copy to hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriterFactory.java
index bfaa120..e2e99c7 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/tuples/TypeAwareTupleWriterFactory.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriterFactory.java
@@ -13,24 +13,24 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.tuples;
+package edu.uci.ics.hyracks.storage.am.rtree.tuples;
import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleWriter;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
-public class TypeAwareTupleWriterFactory implements IBTreeTupleWriterFactory {
+public class RTreeTypeAwareTupleWriterFactory implements ITreeIndexTupleWriterFactory {
private static final long serialVersionUID = 1L;
private ITypeTrait[] typeTraits;
- public TypeAwareTupleWriterFactory(ITypeTrait[] typeTraits) {
+ public RTreeTypeAwareTupleWriterFactory(ITypeTrait[] typeTraits) {
this.typeTraits = typeTraits;
}
@Override
- public IBTreeTupleWriter createTupleWriter() {
- return new TypeAwareTupleWriter(typeTraits);
+ public ITreeIndexTupleWriter createTupleWriter() {
+ return new RTreeTypeAwareTupleWriter(typeTraits);
}
}
diff --git a/hyracks-storage-common/.settings/org.eclipse.jdt.core.prefs b/hyracks-storage-common/.settings/org.eclipse.jdt.core.prefs
index 33fff33..450f5c4 100644
--- a/hyracks-storage-common/.settings/org.eclipse.jdt.core.prefs
+++ b/hyracks-storage-common/.settings/org.eclipse.jdt.core.prefs
@@ -1,4 +1,4 @@
-#Tue Aug 24 14:59:46 PDT 2010
+#Fri May 20 19:34:04 PDT 2011
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.compliance=1.6
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/BufferCache.java b/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/BufferCache.java
index c2693ab..f9ac4d5 100644
--- a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/BufferCache.java
+++ b/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/BufferCache.java
@@ -39,6 +39,8 @@
private static final int MAX_VICTIMIZATION_TRY_COUNT = 3;
+ private final int maxOpenFiles;
+
private final IIOManager ioManager;
private final int pageSize;
private final int numPages;
@@ -52,10 +54,11 @@
private boolean closed;
public BufferCache(IIOManager ioManager, ICacheMemoryAllocator allocator,
- IPageReplacementStrategy pageReplacementStrategy, IFileMapManager fileMapManager, int pageSize, int numPages) {
+ IPageReplacementStrategy pageReplacementStrategy, IFileMapManager fileMapManager, int pageSize, int numPages, int maxOpenFiles) {
this.ioManager = ioManager;
this.pageSize = pageSize;
this.numPages = numPages;
+ this.maxOpenFiles = maxOpenFiles;
pageReplacementStrategy.setBufferCache(this);
ByteBuffer[] buffers = allocator.allocate(pageSize, numPages);
cachedPages = new CachedPage[buffers.length];
@@ -84,11 +87,50 @@
return numPages;
}
- @Override
- public ICachedPage pin(long dpid, boolean newPage) throws HyracksDataException {
+ private void pinSanityCheck(long dpid) throws HyracksDataException {
if (closed) {
throw new HyracksDataException("pin called on a closed cache");
}
+
+ // check whether file has been created and opened
+ int fileId = BufferedFileHandle.getFileId(dpid);
+ BufferedFileHandle fInfo = fileInfoMap.get(fileId);
+ if(fInfo == null) {
+ throw new HyracksDataException("pin called on a fileId " + fileId + " that has not been created.");
+ } else if(fInfo.getReferenceCount() <= 0) {
+ throw new HyracksDataException("pin called on a fileId " + fileId + " that has not been opened.");
+ }
+ }
+
+ @Override
+ public ICachedPage tryPin(long dpid) throws HyracksDataException {
+ pinSanityCheck(dpid);
+
+ CachedPage cPage = null;
+ int hash = hash(dpid);
+ CacheBucket bucket = pageMap[hash];
+ bucket.bucketLock.lock();
+ try {
+ cPage = bucket.cachedPage;
+ while (cPage != null) {
+ if (cPage.dpid == dpid) {
+ cPage.pinCount.incrementAndGet();
+ pageReplacementStrategy.notifyCachePageAccess(cPage);
+ return cPage;
+ }
+ cPage = cPage.next;
+ }
+ } finally {
+ bucket.bucketLock.unlock();
+ }
+
+ return cPage;
+ }
+
+ @Override
+ public ICachedPage pin(long dpid, boolean newPage) throws HyracksDataException {
+ pinSanityCheck(dpid);
+
CachedPage cPage = findPage(dpid, newPage);
if (!newPage) {
if (!cPage.valid) {
@@ -472,7 +514,7 @@
}
@Override
- public void close() {
+ public void close() {
closed = true;
synchronized (cleanerThread) {
cleanerThread.shutdownStart = true;
@@ -484,7 +526,19 @@
e.printStackTrace();
}
}
- }
+ }
+
+ synchronized (fileInfoMap) {
+ try {
+ for(Map.Entry<Integer, BufferedFileHandle> entry : fileInfoMap.entrySet()) {
+ sweepAndFlush(entry.getKey());
+ ioManager.close(entry.getValue().getFileHandle());
+ }
+ } catch(HyracksDataException e) {
+ e.printStackTrace();
+ }
+ fileInfoMap.clear();
+ }
}
@Override
@@ -506,6 +560,29 @@
BufferedFileHandle fInfo;
fInfo = fileInfoMap.get(fileId);
if (fInfo == null) {
+
+ // the map may be full; make room by flushing, closing, and removing unreferenced files
+ boolean unreferencedFileFound = true;
+ while(fileInfoMap.size() >= maxOpenFiles && unreferencedFileFound) {
+ unreferencedFileFound = false;
+ for(Map.Entry<Integer, BufferedFileHandle> entry : fileInfoMap.entrySet()) {
+ if(entry.getValue().getReferenceCount() <= 0) {
+ int entryFileId = entry.getKey();
+ sweepAndFlush(entryFileId);
+ fileInfoMap.remove(entryFileId);
+ ioManager.close(entry.getValue().getFileHandle());
+ unreferencedFileFound = true;
+ // for-each iterator is invalid because we changed fileInfoMap
+ break;
+ }
+ }
+ }
+
+ if(fileInfoMap.size() >= maxOpenFiles) {
+ throw new HyracksDataException("Could not open fileId " + fileId + ". Max number of files " + maxOpenFiles + " already opened and referenced.");
+ }
+
+ // create, open, and map new file reference
FileReference fileRef = fileMapManager.lookupFileName(fileId);
FileHandle fh = ioManager.open(fileRef, IIOManager.FileReadWriteMode.READ_WRITE,
IIOManager.FileSyncMode.METADATA_ASYNC_DATA_ASYNC);
@@ -515,7 +592,7 @@
fInfo.incReferenceCount();
}
}
-
+
private void sweepAndFlush(int fileId) throws HyracksDataException {
for (int i = 0; i < pageMap.length; ++i) {
CacheBucket bucket = pageMap[i];
@@ -574,10 +651,8 @@
if (fInfo == null) {
throw new HyracksDataException("Closing unopened file");
}
- if (fInfo.decReferenceCount() <= 0) {
- sweepAndFlush(fileId);
- fileInfoMap.remove(fileId);
- ioManager.close(fInfo.getFileHandle());
+ if (fInfo.decReferenceCount() < 0) {
+ throw new HyracksDataException("Closed fileId: " + fileId + " more times than it was opened.");
}
}
}
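The BufferCache changes above add tryPin(), which returns a page only if it is already resident (and null otherwise, without triggering any read or victimization), plus a maxOpenFiles bound under which unreferenced file handles are flushed and closed on demand. A sketch of the tryPin-then-pin pattern against the interface, not part of this patch; it assumes the static BufferedFileHandle.getDiskPageId(fileId, pageId) overload and the usual hyracks exception package, and compiles only against the hyracks modules:

import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;

public class TryPinSketch {
    // Prefer a cached page; fall back to pin(), which may perform I/O and victimize a page.
    static ICachedPage pinPreferCached(IBufferCache bufferCache, int fileId, int pageId)
            throws HyracksDataException {
        long dpid = BufferedFileHandle.getDiskPageId(fileId, pageId);
        ICachedPage page = bufferCache.tryPin(dpid);
        if (page == null) {
            page = bufferCache.pin(dpid, false); // existing page, so newPage == false
        }
        return page;
    }
}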
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/IBufferCache.java b/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/IBufferCache.java
index b21c054..e82c01b 100644
--- a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/IBufferCache.java
+++ b/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/buffercache/IBufferCache.java
@@ -26,6 +26,8 @@
public void deleteFile(int fileId) throws HyracksDataException;
+ public ICachedPage tryPin(long dpid) throws HyracksDataException;
+
public ICachedPage pin(long dpid, boolean newPage) throws HyracksDataException;
public void unpin(ICachedPage page) throws HyracksDataException;
diff --git a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/BufferedFileHandle.java b/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/BufferedFileHandle.java
index 1db0bf7..3137f20 100644
--- a/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/BufferedFileHandle.java
+++ b/hyracks-storage-common/src/main/java/edu/uci/ics/hyracks/storage/common/file/BufferedFileHandle.java
@@ -44,6 +44,10 @@
public int decReferenceCount() {
return refCount.decrementAndGet();
}
+
+ public int getReferenceCount() {
+ return refCount.get();
+ }
public long getDiskPageId(int pageId) {
return getDiskPageId(fileId, pageId);
diff --git a/hyracks-test-support/.settings/org.eclipse.jdt.core.prefs b/hyracks-test-support/.settings/org.eclipse.jdt.core.prefs
index 18b0312..375e12e 100644
--- a/hyracks-test-support/.settings/org.eclipse.jdt.core.prefs
+++ b/hyracks-test-support/.settings/org.eclipse.jdt.core.prefs
@@ -1,4 +1,4 @@
-#Wed Jan 05 15:09:31 PST 2011
+#Fri May 20 19:34:07 PDT 2011
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.compliance=1.6
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestStorageManagerComponentHolder.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestStorageManagerComponentHolder.java
index 5338d5e..2325db7 100644
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestStorageManagerComponentHolder.java
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestStorageManagerComponentHolder.java
@@ -15,7 +15,8 @@
package edu.uci.ics.hyracks.test.support;
import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeRegistry;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexRegistry;
import edu.uci.ics.hyracks.storage.common.buffercache.BufferCache;
import edu.uci.ics.hyracks.storage.common.buffercache.ClockPageReplacementStrategy;
import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
@@ -29,17 +30,19 @@
public class TestStorageManagerComponentHolder {
private static IBufferCache bufferCache;
private static IFileMapProvider fileMapProvider;
- private static BTreeRegistry btreeRegistry;
+ private static IndexRegistry<ITreeIndex> treeIndexRegistry;
private static int pageSize;
private static int numPages;
+ private static int maxOpenFiles;
- public static void init(int pageSize, int numPages) {
+ public static void init(int pageSize, int numPages, int maxOpenFiles) {
TestStorageManagerComponentHolder.pageSize = pageSize;
TestStorageManagerComponentHolder.numPages = numPages;
+ TestStorageManagerComponentHolder.maxOpenFiles = maxOpenFiles;
bufferCache = null;
fileMapProvider = null;
- btreeRegistry = null;
+ treeIndexRegistry = null;
}
public synchronized static IBufferCache getBufferCache(IHyracksStageletContext ctx) {
@@ -48,7 +51,7 @@
IPageReplacementStrategy prs = new ClockPageReplacementStrategy();
IFileMapProvider fileMapProvider = getFileMapProvider(ctx);
bufferCache = new BufferCache(ctx.getIOManager(), allocator, prs, (IFileMapManager) fileMapProvider,
- pageSize, numPages);
+ pageSize, numPages, maxOpenFiles);
}
return bufferCache;
}
@@ -60,10 +63,10 @@
return fileMapProvider;
}
- public synchronized static BTreeRegistry getBTreeRegistry(IHyracksStageletContext ctx) {
- if (btreeRegistry == null) {
- btreeRegistry = new BTreeRegistry();
+ public synchronized static IndexRegistry<ITreeIndex> getTreeIndexRegistry(IHyracksStageletContext ctx) {
+ if (treeIndexRegistry == null) {
+ treeIndexRegistry = new IndexRegistry<ITreeIndex>();
}
- return btreeRegistry;
+ return treeIndexRegistry;
}
}
\ No newline at end of file
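With the holder now taking a maxOpenFiles argument, tests initialize it alongside the page size and page count, as the BTreeFieldPrefixNSMTest hunk further below shows. A minimal setup sketch, not part of this patch; the values are the ones that test uses, and the TestUtils import path is assumed from the hyracks-test-support module:

import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
import edu.uci.ics.hyracks.test.support.TestUtils;

public class BufferCacheSetupSketch {
    public static void main(String[] args) {
        int pageSize = 32768, numPages = 40, maxOpenFiles = 10; // values from BTreeFieldPrefixNSMTest
        IHyracksStageletContext ctx = TestUtils.create(128);    // 128-byte frames, as in that test
        TestStorageManagerComponentHolder.init(pageSize, numPages, maxOpenFiles);
        IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
        System.out.println(bufferCache != null); // true once the cache is lazily created
    }
}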
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestBTreeRegistryProvider.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestTreeIndexRegistryProvider.java
similarity index 63%
rename from hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestBTreeRegistryProvider.java
rename to hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestTreeIndexRegistryProvider.java
index 3ab0427..630f47f 100644
--- a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestBTreeRegistryProvider.java
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/test/support/TestTreeIndexRegistryProvider.java
@@ -15,14 +15,15 @@
package edu.uci.ics.hyracks.test.support;
import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeRegistry;
-import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBTreeRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IIndexRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexRegistry;
-public class TestBTreeRegistryProvider implements IBTreeRegistryProvider {
+public class TestTreeIndexRegistryProvider implements IIndexRegistryProvider<ITreeIndex> {
private static final long serialVersionUID = 1L;
-
- @Override
- public BTreeRegistry getBTreeRegistry(IHyracksStageletContext ctx) {
- return TestStorageManagerComponentHolder.getBTreeRegistry(ctx);
- }
+
+ @Override
+ public IndexRegistry<ITreeIndex> getRegistry(IHyracksStageletContext ctx) {
+ return TestStorageManagerComponentHolder.getTreeIndexRegistry(ctx);
+ }
}
\ No newline at end of file
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/.settings/org.eclipse.jdt.core.prefs b/hyracks-tests/hyracks-storage-am-btree-test/.settings/org.eclipse.jdt.core.prefs
index 3cd389e..7cf8ad6 100644
--- a/hyracks-tests/hyracks-storage-am-btree-test/.settings/org.eclipse.jdt.core.prefs
+++ b/hyracks-tests/hyracks-storage-am-btree-test/.settings/org.eclipse.jdt.core.prefs
@@ -1,6 +1,264 @@
-#Thu Jan 06 11:27:16 PST 2011
+#Fri May 20 19:34:07 PDT 2011
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.compliance=1.6
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
org.eclipse.jdt.core.compiler.source=1.6
+org.eclipse.jdt.core.formatter.align_type_members_on_columns=false
+org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression=16
+org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant=48
+org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call=16
+org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation=16
+org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression=16
+org.eclipse.jdt.core.formatter.alignment_for_assignment=0
+org.eclipse.jdt.core.formatter.alignment_for_binary_expression=16
+org.eclipse.jdt.core.formatter.alignment_for_compact_if=16
+org.eclipse.jdt.core.formatter.alignment_for_conditional_expression=80
+org.eclipse.jdt.core.formatter.alignment_for_enum_constants=48
+org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer=16
+org.eclipse.jdt.core.formatter.alignment_for_multiple_fields=16
+org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation=16
+org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration=16
+org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration=16
+org.eclipse.jdt.core.formatter.blank_lines_after_imports=1
+org.eclipse.jdt.core.formatter.blank_lines_after_package=1
+org.eclipse.jdt.core.formatter.blank_lines_before_field=0
+org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration=0
+org.eclipse.jdt.core.formatter.blank_lines_before_imports=1
+org.eclipse.jdt.core.formatter.blank_lines_before_member_type=1
+org.eclipse.jdt.core.formatter.blank_lines_before_method=1
+org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk=1
+org.eclipse.jdt.core.formatter.blank_lines_before_package=0
+org.eclipse.jdt.core.formatter.blank_lines_between_import_groups=1
+org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations=1
+org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_array_initializer=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_block=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_block_in_case=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_enum_constant=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_method_declaration=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_switch=end_of_line
+org.eclipse.jdt.core.formatter.brace_position_for_type_declaration=end_of_line
+org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment=false
+org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment=false
+org.eclipse.jdt.core.formatter.comment.format_block_comments=true
+org.eclipse.jdt.core.formatter.comment.format_header=false
+org.eclipse.jdt.core.formatter.comment.format_html=true
+org.eclipse.jdt.core.formatter.comment.format_javadoc_comments=true
+org.eclipse.jdt.core.formatter.comment.format_line_comments=true
+org.eclipse.jdt.core.formatter.comment.format_source_code=true
+org.eclipse.jdt.core.formatter.comment.indent_parameter_description=true
+org.eclipse.jdt.core.formatter.comment.indent_root_tags=true
+org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags=insert
+org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter=insert
+org.eclipse.jdt.core.formatter.comment.line_length=80
+org.eclipse.jdt.core.formatter.compact_else_if=true
+org.eclipse.jdt.core.formatter.continuation_indentation=2
+org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer=2
+org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line=false
+org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header=true
+org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header=true
+org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header=true
+org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header=true
+org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases=true
+org.eclipse.jdt.core.formatter.indent_empty_lines=false
+org.eclipse.jdt.core.formatter.indent_statements_compare_to_block=true
+org.eclipse.jdt.core.formatter.indent_statements_compare_to_body=true
+org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases=true
+org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch=true
+org.eclipse.jdt.core.formatter.indentation.size=4
+org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable=insert
+org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_member=insert
+org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement=do not insert
+org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration=insert
+org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration=insert
+org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block=insert
+org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant=insert
+org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration=insert
+org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body=insert
+org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter=insert
+org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator=insert
+org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_binary_operator=insert
+org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments=insert
+org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters=insert
+org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block=insert
+org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast=insert
+org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert=insert
+org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case=insert
+org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional=insert
+org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for=insert
+org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments=insert
+org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters=insert
+org.eclipse.jdt.core.formatter.insert_space_after_ellipsis=insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer=insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional=insert
+org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard=do not insert
+org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for=insert
+org.eclipse.jdt.core.formatter.insert_space_after_unary_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter=insert
+org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator=insert
+org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_binary_operator=insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer=insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert=insert
+org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional=insert
+org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for=insert
+org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_ellipsis=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized=insert
+org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while=insert
+org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return=insert
+org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw=insert
+org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional=insert
+org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_semicolon=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for=do not insert
+org.eclipse.jdt.core.formatter.insert_space_before_unary_operator=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration=do not insert
+org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation=do not insert
+org.eclipse.jdt.core.formatter.join_lines_in_comments=true
+org.eclipse.jdt.core.formatter.join_wrapped_lines=true
+org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line=false
+org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line=false
+org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line=false
+org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line=false
+org.eclipse.jdt.core.formatter.lineSplit=120
+org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column=false
+org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column=false
+org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body=0
+org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve=1
+org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line=true
+org.eclipse.jdt.core.formatter.tabulation.char=space
+org.eclipse.jdt.core.formatter.tabulation.size=4
+org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations=false
+org.eclipse.jdt.core.formatter.wrap_before_binary_operator=true
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/AbstractBTreeTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/AbstractBTreeTest.java
index c2e2fd6..56ae6e9 100644
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/AbstractBTreeTest.java
+++ b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/AbstractBTreeTest.java
@@ -3,11 +3,14 @@
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.Date;
+import java.util.logging.Logger;
import org.junit.AfterClass;
public abstract class AbstractBTreeTest {
+ protected static final Logger LOGGER = Logger.getLogger(AbstractBTreeTest.class.getName());
+
protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
protected final static String tmpDir = System.getProperty("java.io.tmpdir");
protected final static String sep = System.getProperty("file.separator");
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeFieldPrefixNSMTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeFieldPrefixNSMTest.java
index e856296..2b2bf55 100644
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeFieldPrefixNSMTest.java
+++ b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeFieldPrefixNSMTest.java
@@ -39,15 +39,14 @@
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleWriter;
import edu.uci.ics.hyracks.storage.am.btree.api.IPrefixSlotManager;
-import edu.uci.ics.hyracks.storage.am.btree.frames.FieldPrefixNSMLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrame;
import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeException;
import edu.uci.ics.hyracks.storage.am.btree.impls.FieldPrefixSlotManager;
-import edu.uci.ics.hyracks.storage.am.btree.impls.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.btree.tuples.TypeAwareTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriter;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
@@ -58,24 +57,14 @@
private static final int PAGE_SIZE = 32768; // 32K
private static final int NUM_PAGES = 40;
+ private static final int MAX_OPEN_FILES = 10;
private static final int HYRACKS_FRAME_SIZE = 128;
private IHyracksStageletContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
-
- public class BufferAllocator implements ICacheMemoryAllocator {
- @Override
- public ByteBuffer[] allocate(int pageSize, int numPages) {
- ByteBuffer[] buffers = new ByteBuffer[numPages];
- for (int i = 0; i < numPages; ++i) {
- buffers[i] = ByteBuffer.allocate(pageSize);
- }
- return buffers;
- }
- }
-
+
private ITupleReference createTuple(IHyracksStageletContext ctx, int f0,
int f1, int f2, boolean print) throws HyracksDataException {
if (print)
- System.out.println("CREATING: " + f0 + " " + f1 + " " + f2);
+ LOGGER.info("CREATING: " + f0 + " " + f1 + " " + f2);
ByteBuffer buf = ctx.allocateFrame();
FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
@@ -112,7 +101,7 @@
@Test
public void test01() throws Exception {
- TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES);
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
IBufferCache bufferCache = TestStorageManagerComponentHolder
.getBufferCache(ctx);
IFileMapProvider fmp = TestStorageManagerComponentHolder
@@ -154,8 +143,8 @@
try {
IPrefixSlotManager slotManager = new FieldPrefixSlotManager();
- IBTreeTupleWriter tupleWriter = new TypeAwareTupleWriter(typeTraits);
- FieldPrefixNSMLeafFrame frame = new FieldPrefixNSMLeafFrame(
+ ITreeIndexTupleWriter tupleWriter = new TypeAwareTupleWriter(typeTraits);
+ BTreeFieldPrefixNSMLeafFrame frame = new BTreeFieldPrefixNSMLeafFrame(
tupleWriter);
frame.setPage(page);
frame.initBuffer((byte) 0);
@@ -176,7 +165,7 @@
for (int i = 0; i < numRecords; i++) {
if ((i + 1) % 100 == 0)
- print("INSERTING " + (i + 1) + " / " + numRecords + "\n");
+ LOGGER.info("INSERTING " + (i + 1) + " / " + numRecords);
int a = rnd.nextInt() % smallMax;
int b = rnd.nextInt() % smallMax;
@@ -184,7 +173,8 @@
ITupleReference tuple = createTuple(ctx, a, b, c, false);
try {
- frame.insert(tuple, cmp);
+ int targetTupleIndex = frame.findTupleIndex(tuple, cmp);
+ frame.insert(tuple, cmp, targetTupleIndex);
} catch (BTreeException e) {
e.printStackTrace();
} catch (Exception e) {
@@ -215,7 +205,7 @@
for (int i = 0; i < numRecords; i++) {
if ((i + 1) % 100 == 0)
- print("DELETING " + (i + 1) + " / " + numRecords + "\n");
+ LOGGER.info("DELETING " + (i + 1) + " / " + numRecords);
ITupleReference tuple = createTuple(ctx,
savedFields[i][0], savedFields[i][1],
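A minimal sketch (not part of the patch) of the two-step leaf-frame insert that the hunk above switches to: the target slot is located first with findTupleIndex and then passed to insert. The names frame, tuple, and cmp refer to the BTreeFieldPrefixNSMLeafFrame, ITupleReference, and MultiComparator already set up in the test; exception handling is elided.

    // New insert protocol used in BTreeFieldPrefixNSMTest:
    int targetTupleIndex = frame.findTupleIndex(tuple, cmp); // locate the slot for the tuple
    frame.insert(tuple, cmp, targetTupleIndex);              // insert the tuple at that slot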
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeStatsTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeStatsTest.java
new file mode 100644
index 0000000..44cf18b
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeStatsTest.java
@@ -0,0 +1,173 @@
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.io.DataOutput;
+import java.io.File;
+import java.nio.ByteBuffer;
+import java.util.Random;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.TypeTrait;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.common.utility.TreeIndexBufferCacheWarmup;
+import edu.uci.ics.hyracks.storage.am.common.utility.TreeIndexStats;
+import edu.uci.ics.hyracks.storage.am.common.utility.TreeIndexStatsGatherer;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestUtils;
+
+public class BTreeStatsTest extends AbstractBTreeTest {
+
+ // private static final int PAGE_SIZE = 256;
+ // private static final int NUM_PAGES = 10;
+ // private static final int PAGE_SIZE = 32768;
+ private static final int PAGE_SIZE = 4096;
+ private static final int NUM_PAGES = 1000;
+ private static final int MAX_OPEN_FILES = 10;
+ private static final int HYRACKS_FRAME_SIZE = 128;
+ private IHyracksStageletContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
+
+ @Test
+ public void test01() throws Exception {
+
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder
+ .getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder
+ .getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
+
+ // declare fields
+ int fieldCount = 2;
+ ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ typeTraits[0] = new TypeTrait(4);
+ typeTraits[1] = new TypeTrait(4);
+
+ // declare keys
+ int keyFieldCount = 1;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = IntegerBinaryComparatorFactory.INSTANCE
+ .createBinaryComparator();
+
+ MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+
+ TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(
+ typeTraits);
+ ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(
+ tupleWriterFactory);
+ ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(
+ tupleWriterFactory);
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+
+ IBTreeLeafFrame leafFrame = (IBTreeLeafFrame)leafFrameFactory.createFrame();
+ IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame)interiorFrameFactory.createFrame();
+ ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
+
+ IFreePageManager freePageManager = new LinkedListFreePageManager(
+ bufferCache, fileId, 0, metaFrameFactory);
+
+ BTree btree = new BTree(bufferCache, freePageManager,
+ interiorFrameFactory, leafFrameFactory, cmp);
+ btree.create(fileId, leafFrame, metaFrame);
+ btree.open(fileId);
+
+ Random rnd = new Random();
+ rnd.setSeed(50);
+
+ long start = System.currentTimeMillis();
+
+ LOGGER.info("INSERTING INTO TREE");
+
+ ByteBuffer frame = ctx.allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+ DataOutput dos = tb.getDataOutput();
+
+ ISerializerDeserializer[] recDescSers = {
+ IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx
+ .getFrameSize(), recDesc);
+ accessor.reset(frame);
+ FrameTupleReference tuple = new FrameTupleReference();
+
+ BTreeOpContext insertOpCtx = btree.createOpContext(
+ IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
+
+ // 10000
+ for (int i = 0; i < 100000; i++) {
+
+ int f0 = rnd.nextInt() % 100000;
+ int f1 = 5;
+
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb
+ .getSize());
+
+ tuple.reset(accessor, 0);
+
+ if (i % 10000 == 0) {
+ long end = System.currentTimeMillis();
+ LOGGER.info("INSERTING " + i + " : " + f0 + " " + f1 + " "
+ + (end - start));
+ }
+
+ try {
+ btree.insert(tuple, insertOpCtx);
+ } catch (TreeIndexException e) {
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ TreeIndexStatsGatherer statsGatherer = new TreeIndexStatsGatherer(bufferCache, freePageManager, fileId, btree.getRootPageId());
+ TreeIndexStats stats = statsGatherer.gatherStats(leafFrame, interiorFrame, metaFrame);
+ LOGGER.info(stats.toString());
+
+ TreeIndexBufferCacheWarmup bufferCacheWarmup = new TreeIndexBufferCacheWarmup(bufferCache, freePageManager, fileId);
+ bufferCacheWarmup.warmup(leafFrame, metaFrame, new int[] {1, 2}, new int[] {2, 5});
+
+ btree.close();
+ bufferCache.closeFile(fileId);
+ bufferCache.close();
+ }
+}
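A condensed sketch (not part of the patch) of the statistics and buffer-cache warm-up flow exercised at the end of the new BTreeStatsTest.test01() above; all class and method names are taken from that test, and bufferCache, freePageManager, fileId, btree, and the frames are assumed to be set up as earlier in the test.

    // Gather per-tree statistics, log them, then warm the buffer cache.
    TreeIndexStatsGatherer statsGatherer =
        new TreeIndexStatsGatherer(bufferCache, freePageManager, fileId, btree.getRootPageId());
    TreeIndexStats stats = statsGatherer.gatherStats(leafFrame, interiorFrame, metaFrame);
    LOGGER.info(stats.toString());

    TreeIndexBufferCacheWarmup bufferCacheWarmup =
        new TreeIndexBufferCacheWarmup(bufferCache, freePageManager, fileId);
    bufferCacheWarmup.warmup(leafFrame, metaFrame, new int[] { 1, 2 }, new int[] { 2, 5 });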
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTest.java
index 2985601..6fa5d43 100644
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTest.java
+++ b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTest.java
@@ -39,1302 +39,1152 @@
import edu.uci.ics.hyracks.dataflow.common.data.comparators.UTF8StringBinaryComparatorFactory;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeCursor;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeMetaDataFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.frames.MetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.frames.NSMInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.frames.NSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeException;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOp;
import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext;
-import edu.uci.ics.hyracks.storage.am.btree.impls.DiskOrderScanCursor;
-import edu.uci.ics.hyracks.storage.am.btree.impls.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangeSearchCursor;
-import edu.uci.ics.hyracks.storage.am.btree.tuples.SimpleTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.am.btree.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.tuples.SimpleTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
import edu.uci.ics.hyracks.test.support.TestUtils;
-@SuppressWarnings("unchecked")
public class BTreeTest extends AbstractBTreeTest {
- private static final int PAGE_SIZE = 256;
- private static final int NUM_PAGES = 10;
- private static final int HYRACKS_FRAME_SIZE = 128;
- private IHyracksStageletContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
+ private static final int PAGE_SIZE = 256;
+ private static final int NUM_PAGES = 10;
+ private static final int MAX_OPEN_FILES = 10;
+ private static final int HYRACKS_FRAME_SIZE = 128;
+ private IHyracksStageletContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
- public class BufferAllocator implements ICacheMemoryAllocator {
- @Override
- public ByteBuffer[] allocate(int pageSize, int numPages) {
- ByteBuffer[] buffers = new ByteBuffer[numPages];
- for (int i = 0; i < numPages; ++i) {
- buffers[i] = ByteBuffer.allocate(pageSize);
- }
- return buffers;
- }
- }
+ // FIXED-LENGTH KEY TEST
+ // create a B-tree with one fixed-length "key" field and one fixed-length
+ // "value" field
+ // fill B-tree with random values using insertions (not bulk load)
+ // perform ordered scan and range search
+ @Test
+ public void test01() throws Exception {
- // FIXED-LENGTH KEY TEST
- // create a B-tree with one fixed-length "key" field and one fixed-length
- // "value" field
- // fill B-tree with random values using insertions (not bulk load)
- // perform ordered scan and range search
- @Test
- public void test01() throws Exception {
+ LOGGER.info("FIXED-LENGTH KEY TEST");
- print("FIXED-LENGTH KEY TEST\n");
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
- TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES);
- IBufferCache bufferCache = TestStorageManagerComponentHolder
- .getBufferCache(ctx);
- IFileMapProvider fmp = TestStorageManagerComponentHolder
- .getFileMapProvider(ctx);
- FileReference file = new FileReference(new File(fileName));
- bufferCache.createFile(file);
- int fileId = fmp.lookupFileId(file);
- bufferCache.openFile(fileId);
+ // declare fields
+ int fieldCount = 2;
+ ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ typeTraits[0] = new TypeTrait(4);
+ typeTraits[1] = new TypeTrait(4);
- // declare fields
- int fieldCount = 2;
- ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
- typeTraits[0] = new TypeTrait(4);
- typeTraits[1] = new TypeTrait(4);
+ // declare keys
+ int keyFieldCount = 1;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
- // declare keys
- int keyFieldCount = 1;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = IntegerBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
+ MultiComparator cmp = new MultiComparator(typeTraits, cmps);
- MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+ TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+ ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+ ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
- TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(
- typeTraits);
- // SimpleTupleWriterFactory tupleWriterFactory = new
- // SimpleTupleWriterFactory();
- IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(
- tupleWriterFactory);
- IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(
- tupleWriterFactory);
- IBTreeMetaDataFrameFactory metaFrameFactory = new MetaDataFrameFactory();
+ IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
+ IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
+ ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
- IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
- IBTreeInteriorFrame interiorFrame = interiorFrameFactory.getFrame();
- IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
- BTree btree = new BTree(bufferCache, interiorFrameFactory,
- leafFrameFactory, cmp);
- btree.create(fileId, leafFrame, metaFrame);
- btree.open(fileId);
+ BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
+ btree.create(fileId, leafFrame, metaFrame);
+ btree.open(fileId);
- Random rnd = new Random();
- rnd.setSeed(50);
+ Random rnd = new Random();
+ rnd.setSeed(50);
- long start = System.currentTimeMillis();
+ long start = System.currentTimeMillis();
- print("INSERTING INTO TREE\n");
+ LOGGER.info("INSERTING INTO TREE");
- ByteBuffer frame = ctx.allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
- ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
- DataOutput dos = tb.getDataOutput();
+ ByteBuffer frame = ctx.allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+ DataOutput dos = tb.getDataOutput();
- ISerializerDeserializer[] recDescSers = {
- IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE };
- RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
- IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx
- .getFrameSize(), recDesc);
- accessor.reset(frame);
- FrameTupleReference tuple = new FrameTupleReference();
+ ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+ accessor.reset(frame);
+ FrameTupleReference tuple = new FrameTupleReference();
- BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT,
- leafFrame, interiorFrame, metaFrame);
+ BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
- // 10000
- for (int i = 0; i < 10000; i++) {
+ // 10000
+ for (int i = 0; i < 10000; i++) {
- int f0 = rnd.nextInt() % 10000;
- int f1 = 5;
+ int f0 = rnd.nextInt() % 10000;
+ int f1 = 5;
- tb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
- tb.addFieldEndOffset();
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
- appender.reset(frame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb
- .getSize());
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
- tuple.reset(accessor, 0);
+ tuple.reset(accessor, 0);
- // System.out.println(tuple.getFieldCount() + " " +
- // tuple.getFieldLength(0) + " " + tuple.getFieldLength(1));
+ if (i % 1000 == 0) {
+ long end = System.currentTimeMillis();
+ LOGGER.info("INSERTING " + i + " : " + f0 + " " + f1 + " " + (end - start));
+ }
- if (i % 1000 == 0) {
- long end = System.currentTimeMillis();
- print("INSERTING " + i + " : " + f0 + " " + f1 + " "
- + (end - start) + "\n");
- }
+ try {
+ btree.insert(tuple, insertOpCtx);
+ } catch (TreeIndexException e) {
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ // btree.printTree(leafFrame, interiorFrame);
- try {
- btree.insert(tuple, insertOpCtx);
- } catch (BTreeException e) {
- } catch (Exception e) {
- e.printStackTrace();
- }
+ int maxPage = btree.getFreePageManager().getMaxPage(metaFrame);
+ LOGGER.info("MAXPAGE: " + maxPage);
+ LOGGER.info(btree.printStats());
+
+ long end = System.currentTimeMillis();
+ long duration = end - start;
+ LOGGER.info("DURATION: " + duration);
- // btree.printTree(leafFrame, interiorFrame);
- // System.out.println();
- }
- // btree.printTree(leafFrame, interiorFrame);
- // System.out.println();
+ // ordered scan
- int maxPage = btree.getMaxPage(metaFrame);
- System.out.println("MAXPAGE: " + maxPage);
+ LOGGER.info("ORDERED SCAN:");
+ ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(leafFrame);
+ RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+ BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH, leafFrame, interiorFrame, null);
+ btree.search(scanCursor, nullPred, searchOpCtx);
+ try {
+ while (scanCursor.hasNext()) {
+ scanCursor.next();
+ ITupleReference frameTuple = scanCursor.getTuple();
+ String rec = cmp.printTuple(frameTuple, recDescSers);
+ LOGGER.info(rec);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ scanCursor.close();
+ }
- String stats = btree.printStats();
- print(stats);
+ // disk-order scan
+ LOGGER.info("DISK-ORDER SCAN:");
+ TreeDiskOrderScanCursor diskOrderCursor = new TreeDiskOrderScanCursor(leafFrame);
+ BTreeOpContext diskOrderScanOpCtx = btree.createOpContext(IndexOp.DISKORDERSCAN, leafFrame, null, null);
+ btree.diskOrderScan(diskOrderCursor, leafFrame, metaFrame, diskOrderScanOpCtx);
+ try {
+ while (diskOrderCursor.hasNext()) {
+ diskOrderCursor.next();
+ ITupleReference frameTuple = diskOrderCursor.getTuple();
+ String rec = cmp.printTuple(frameTuple, recDescSers);
+ LOGGER.info(rec);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ diskOrderCursor.close();
+ }
- long end = System.currentTimeMillis();
- long duration = end - start;
- print("DURATION: " + duration + "\n");
+ // range search in [-1000, 1000]
+ LOGGER.info("RANGE SEARCH:");
- // ordered scan
+ ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor(leafFrame);
- print("ORDERED SCAN:\n");
- IBTreeCursor scanCursor = new RangeSearchCursor(leafFrame);
- RangePredicate nullPred = new RangePredicate(true, null, null, true,
- true, null, null);
- BTreeOpContext searchOpCtx = btree.createOpContext(BTreeOp.BTO_SEARCH,
- leafFrame, interiorFrame, null);
- btree.search(scanCursor, nullPred, searchOpCtx);
- try {
- while (scanCursor.hasNext()) {
- scanCursor.next();
- ITupleReference frameTuple = scanCursor.getTuple();
- String rec = cmp.printTuple(frameTuple, recDescSers);
- print(rec + "\n");
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- scanCursor.close();
- }
+ // build low and high keys
+ ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
+ DataOutput kdos = ktb.getDataOutput();
- // disk-order scan
- print("DISK-ORDER SCAN:\n");
- DiskOrderScanCursor diskOrderCursor = new DiskOrderScanCursor(leafFrame);
- btree.diskOrderScan(diskOrderCursor, leafFrame, metaFrame);
- try {
- while (diskOrderCursor.hasNext()) {
- diskOrderCursor.next();
- ITupleReference frameTuple = diskOrderCursor.getTuple();
- String rec = cmp.printTuple(frameTuple, recDescSers);
- print(rec + "\n");
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- diskOrderCursor.close();
- }
+ ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
+ IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx.getFrameSize(), keyDesc);
+ keyAccessor.reset(frame);
- // range search in [-1000, 1000]
- print("RANGE SEARCH:\n");
+ appender.reset(frame, true);
- IBTreeCursor rangeCursor = new RangeSearchCursor(leafFrame);
+ // build and append low key
+ ktb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(-1000, kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
- // build low and high keys
- ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
- DataOutput kdos = ktb.getDataOutput();
+ // build and append high key
+ ktb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(1000, kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
- ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE };
- RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
- IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx
- .getFrameSize(), keyDesc);
- keyAccessor.reset(frame);
+ // create tuplereferences for search keys
+ FrameTupleReference lowKey = new FrameTupleReference();
+ lowKey.reset(keyAccessor, 0);
- appender.reset(frame, true);
+ FrameTupleReference highKey = new FrameTupleReference();
+ highKey.reset(keyAccessor, 1);
- // build and append low key
- ktb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(-1000, kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb
- .getSize());
+ IBinaryComparator[] searchCmps = new IBinaryComparator[1];
+ searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
- // build and append high key
- ktb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(1000, kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb
- .getSize());
+ RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
+ btree.search(rangeCursor, rangePred, searchOpCtx);
- // create tuplereferences for search keys
- FrameTupleReference lowKey = new FrameTupleReference();
- lowKey.reset(keyAccessor, 0);
+ try {
+ while (rangeCursor.hasNext()) {
+ rangeCursor.next();
+ ITupleReference frameTuple = rangeCursor.getTuple();
+ String rec = cmp.printTuple(frameTuple, recDescSers);
+ LOGGER.info(rec);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ rangeCursor.close();
+ }
- FrameTupleReference highKey = new FrameTupleReference();
- highKey.reset(keyAccessor, 1);
+ btree.close();
+ bufferCache.closeFile(fileId);
+ bufferCache.close();
+ }
- IBinaryComparator[] searchCmps = new IBinaryComparator[1];
- searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
+ // COMPOSITE KEY TEST (NON-UNIQUE B-TREE)
+ // create a B-tree with two fixed-length "key" fields and one
+ // fixed-length "value" field
+ // fill B-tree with random values using insertions (not bulk load)
+ // perform ordered scan and range search
+ @Test
+ public void test02() throws Exception {
- RangePredicate rangePred = new RangePredicate(true, lowKey, highKey,
- true, true, searchCmp, searchCmp);
- btree.search(rangeCursor, rangePred, searchOpCtx);
+ LOGGER.info("COMPOSITE KEY TEST");
- try {
- while (rangeCursor.hasNext()) {
- rangeCursor.next();
- ITupleReference frameTuple = rangeCursor.getTuple();
- String rec = cmp.printTuple(frameTuple, recDescSers);
- print(rec + "\n");
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- rangeCursor.close();
- }
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
- btree.close();
- bufferCache.closeFile(fileId);
- bufferCache.close();
+ // declare fields
+ int fieldCount = 3;
+ ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ typeTraits[0] = new TypeTrait(4);
+ typeTraits[1] = new TypeTrait(4);
+ typeTraits[2] = new TypeTrait(4);
- print("\n");
- }
+ // declare keys
+ int keyFieldCount = 2;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
- // COMPOSITE KEY TEST (NON-UNIQUE B-TREE)
- // create a B-tree with one two fixed-length "key" fields and one
- // fixed-length "value" field
- // fill B-tree with random values using insertions (not bulk load)
- // perform ordered scan and range search
- @Test
- public void test02() throws Exception {
+ MultiComparator cmp = new MultiComparator(typeTraits, cmps);
- print("COMPOSITE KEY TEST\n");
+ TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+ // SimpleTupleWriterFactory tupleWriterFactory = new
+ // SimpleTupleWriterFactory();
+ ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+ ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
- TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES);
- IBufferCache bufferCache = TestStorageManagerComponentHolder
- .getBufferCache(ctx);
- IFileMapProvider fmp = TestStorageManagerComponentHolder
- .getFileMapProvider(ctx);
- FileReference file = new FileReference(new File(fileName));
- bufferCache.createFile(file);
- int fileId = fmp.lookupFileId(file);
- bufferCache.openFile(fileId);
+ IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
+ IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
+ ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
- // declare fields
- int fieldCount = 3;
- ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
- typeTraits[0] = new TypeTrait(4);
- typeTraits[1] = new TypeTrait(4);
- typeTraits[2] = new TypeTrait(4);
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
- // declare keys
- int keyFieldCount = 2;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = IntegerBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- cmps[1] = IntegerBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
+ BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
+ btree.create(fileId, leafFrame, metaFrame);
+ btree.open(fileId);
- MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+ Random rnd = new Random();
+ rnd.setSeed(50);
- TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(
- typeTraits);
- // SimpleTupleWriterFactory tupleWriterFactory = new
- // SimpleTupleWriterFactory();
- IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(
- tupleWriterFactory);
- IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(
- tupleWriterFactory);
- IBTreeMetaDataFrameFactory metaFrameFactory = new MetaDataFrameFactory();
+ long start = System.currentTimeMillis();
- IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
- IBTreeInteriorFrame interiorFrame = interiorFrameFactory.getFrame();
- IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
+ LOGGER.info("INSERTING INTO TREE");
- BTree btree = new BTree(bufferCache, interiorFrameFactory,
- leafFrameFactory, cmp);
- btree.create(fileId, leafFrame, metaFrame);
- btree.open(fileId);
+ ByteBuffer frame = ctx.allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+ DataOutput dos = tb.getDataOutput();
- Random rnd = new Random();
- rnd.setSeed(50);
+ ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+ accessor.reset(frame);
+ FrameTupleReference tuple = new FrameTupleReference();
- long start = System.currentTimeMillis();
+ BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
- print("INSERTING INTO TREE\n");
+ for (int i = 0; i < 10000; i++) {
+ int f0 = rnd.nextInt() % 2000;
+ int f1 = rnd.nextInt() % 1000;
+ int f2 = 5;
- ByteBuffer frame = ctx.allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
- ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
- DataOutput dos = tb.getDataOutput();
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f2, dos);
+ tb.addFieldEndOffset();
- ISerializerDeserializer[] recDescSers = {
- IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE };
- RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
- IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx
- .getFrameSize(), recDesc);
- accessor.reset(frame);
- FrameTupleReference tuple = new FrameTupleReference();
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
- BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT,
- leafFrame, interiorFrame, metaFrame);
+ tuple.reset(accessor, 0);
- for (int i = 0; i < 10000; i++) {
- int f0 = rnd.nextInt() % 2000;
- int f1 = rnd.nextInt() % 1000;
- int f2 = 5;
+ if (i % 1000 == 0) {
+ LOGGER.info("INSERTING " + i + " : " + f0 + " " + f1);
+ }
- tb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(f2, dos);
- tb.addFieldEndOffset();
+ try {
+ btree.insert(tuple, insertOpCtx);
+ } catch (Exception e) {
+ }
+ }
+ // btree.printTree(leafFrame, interiorFrame);
- appender.reset(frame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb
- .getSize());
+ long end = System.currentTimeMillis();
+ long duration = end - start;
+ LOGGER.info("DURATION: " + duration);
- tuple.reset(accessor, 0);
+ // try a simple index scan
+ LOGGER.info("ORDERED SCAN:");
+ ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(leafFrame);
+ RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+ BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH, leafFrame, interiorFrame, null);
+ btree.search(scanCursor, nullPred, searchOpCtx);
- if (i % 1000 == 0) {
- print("INSERTING " + i + " : " + f0 + " " + f1 + "\n");
- }
+ try {
+ while (scanCursor.hasNext()) {
+ scanCursor.next();
+ ITupleReference frameTuple = scanCursor.getTuple();
+ String rec = cmp.printTuple(frameTuple, recDescSers);
+ LOGGER.info(rec);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ scanCursor.close();
+ }
- try {
- btree.insert(tuple, insertOpCtx);
- } catch (Exception e) {
- }
- }
- // btree.printTree(leafFrame, interiorFrame);
+ // range search in [(-3),(3)]
+ LOGGER.info("RANGE SEARCH:");
+ ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor(leafFrame);
- long end = System.currentTimeMillis();
- long duration = end - start;
- print("DURATION: " + duration + "\n");
+ // build low and high keys
+ ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
+ DataOutput kdos = ktb.getDataOutput();
- // try a simple index scan
- print("ORDERED SCAN:\n");
- IBTreeCursor scanCursor = new RangeSearchCursor(leafFrame);
- RangePredicate nullPred = new RangePredicate(true, null, null, true,
- true, null, null);
- BTreeOpContext searchOpCtx = btree.createOpContext(BTreeOp.BTO_SEARCH,
- leafFrame, interiorFrame, null);
- btree.search(scanCursor, nullPred, searchOpCtx);
+ ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
+ IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx.getFrameSize(), keyDesc);
+ keyAccessor.reset(frame);
- try {
- while (scanCursor.hasNext()) {
- scanCursor.next();
- ITupleReference frameTuple = scanCursor.getTuple();
- String rec = cmp.printTuple(frameTuple, recDescSers);
- print(rec + "\n");
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- scanCursor.close();
- }
+ appender.reset(frame, true);
- // range search in [(-3),(3)]
- print("RANGE SEARCH:\n");
- IBTreeCursor rangeCursor = new RangeSearchCursor(leafFrame);
+ // build and append low key
+ ktb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(-3, kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
- // build low and high keys
- ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
- DataOutput kdos = ktb.getDataOutput();
+ // build and append high key
+ ktb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(3, kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
- ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE };
- RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
- IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx
- .getFrameSize(), keyDesc);
- keyAccessor.reset(frame);
+ // create tuplereferences for search keys
+ FrameTupleReference lowKey = new FrameTupleReference();
+ lowKey.reset(keyAccessor, 0);
- appender.reset(frame, true);
+ FrameTupleReference highKey = new FrameTupleReference();
+ highKey.reset(keyAccessor, 1);
- // build and append low key
- ktb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(-3, kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb
- .getSize());
+ IBinaryComparator[] searchCmps = new IBinaryComparator[1];
+ searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps); // use only a single comparator for searching
- // build and append high key
- ktb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(3, kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb
- .getSize());
+ RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
+ btree.search(rangeCursor, rangePred, searchOpCtx);
- // create tuplereferences for search keys
- FrameTupleReference lowKey = new FrameTupleReference();
- lowKey.reset(keyAccessor, 0);
+ try {
+ while (rangeCursor.hasNext()) {
+ rangeCursor.next();
+ ITupleReference frameTuple = rangeCursor.getTuple();
+ String rec = cmp.printTuple(frameTuple, recDescSers);
+ LOGGER.info(rec);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ rangeCursor.close();
+ }
- FrameTupleReference highKey = new FrameTupleReference();
- highKey.reset(keyAccessor, 1);
+ btree.close();
+ bufferCache.closeFile(fileId);
+ bufferCache.close();
+ }
- IBinaryComparator[] searchCmps = new IBinaryComparator[1];
- searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps); // use
- // only
- // a
- // single
- // comparator
- // for
- // searching
+ // VARIABLE-LENGTH TEST
+ // create a B-tree with one variable-length "key" field and one
+ // variable-length "value" field
+ // fill B-tree with random values using insertions (not bulk load)
+ // perform ordered scan and range search
+ @Test
+ public void test03() throws Exception {
- RangePredicate rangePred = new RangePredicate(true, lowKey, highKey,
- true, true, searchCmp, searchCmp);
- btree.search(rangeCursor, rangePred, searchOpCtx);
+ LOGGER.info("VARIABLE-LENGTH KEY TEST");
- try {
- while (rangeCursor.hasNext()) {
- rangeCursor.next();
- ITupleReference frameTuple = rangeCursor.getTuple();
- String rec = cmp.printTuple(frameTuple, recDescSers);
- print(rec + "\n");
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- rangeCursor.close();
- }
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
- btree.close();
- bufferCache.closeFile(fileId);
- bufferCache.close();
+ // declare fields
+ int fieldCount = 2;
+ ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ typeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+ typeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
- print("\n");
- }
+ // declare keys
+ int keyFieldCount = 1;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
- // VARIABLE-LENGTH TEST
- // create a B-tree with one variable-length "key" field and one
- // variable-length "value" field
- // fill B-tree with random values using insertions (not bulk load)
- // perform ordered scan and range search
- @Test
- public void test03() throws Exception {
+ MultiComparator cmp = new MultiComparator(typeTraits, cmps);
- print("VARIABLE-LENGTH KEY TEST\n");
+ SimpleTupleWriterFactory tupleWriterFactory = new SimpleTupleWriterFactory();
+ // TypeAwareTupleWriterFactory tupleWriterFactory = new
+ // TypeAwareTupleWriterFactory(typeTraits);
+ ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+ ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
- TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES);
- IBufferCache bufferCache = TestStorageManagerComponentHolder
- .getBufferCache(ctx);
- IFileMapProvider fmp = TestStorageManagerComponentHolder
- .getFileMapProvider(ctx);
- FileReference file = new FileReference(new File(fileName));
- bufferCache.createFile(file);
- int fileId = fmp.lookupFileId(file);
- bufferCache.openFile(fileId);
+ IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
+ IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
+ ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
- // declare fields
- int fieldCount = 2;
- ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
- typeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
- typeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
- // declare keys
- int keyFieldCount = 1;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
+ BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
+ btree.create(fileId, leafFrame, metaFrame);
+ btree.open(fileId);
- MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+ Random rnd = new Random();
+ rnd.setSeed(50);
- SimpleTupleWriterFactory tupleWriterFactory = new SimpleTupleWriterFactory();
- // TypeAwareTupleWriterFactory tupleWriterFactory = new
- // TypeAwareTupleWriterFactory(typeTraits);
- IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(
- tupleWriterFactory);
- IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(
- tupleWriterFactory);
- IBTreeMetaDataFrameFactory metaFrameFactory = new MetaDataFrameFactory();
+ ByteBuffer frame = ctx.allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+ DataOutput dos = tb.getDataOutput();
- IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
- IBTreeInteriorFrame interiorFrame = interiorFrameFactory.getFrame();
- IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
+ ISerializerDeserializer[] recDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+ accessor.reset(frame);
+ FrameTupleReference tuple = new FrameTupleReference();
- BTree btree = new BTree(bufferCache, interiorFrameFactory,
- leafFrameFactory, cmp);
- btree.create(fileId, leafFrame, metaFrame);
- btree.open(fileId);
+ BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
+ int maxLength = 10; // max string length to be generated
+ for (int i = 0; i < 10000; i++) {
- Random rnd = new Random();
- rnd.setSeed(50);
+ String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+ String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
- ByteBuffer frame = ctx.allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
- ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
- DataOutput dos = tb.getDataOutput();
+ tb.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
- ISerializerDeserializer[] recDescSers = {
- UTF8StringSerializerDeserializer.INSTANCE,
- UTF8StringSerializerDeserializer.INSTANCE };
- RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
- IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx
- .getFrameSize(), recDesc);
- accessor.reset(frame);
- FrameTupleReference tuple = new FrameTupleReference();
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
- BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT,
- leafFrame, interiorFrame, metaFrame);
- int maxLength = 10; // max string length to be generated
- for (int i = 0; i < 10000; i++) {
+ tuple.reset(accessor, 0);
- String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1,
- rnd);
- String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1,
- rnd);
+ if (i % 1000 == 0) {
+ LOGGER.info("INSERTING " + i);
+ }
- tb.reset();
- UTF8StringSerializerDeserializer.INSTANCE.serialize(f0, dos);
- tb.addFieldEndOffset();
- UTF8StringSerializerDeserializer.INSTANCE.serialize(f1, dos);
- tb.addFieldEndOffset();
+ try {
+ btree.insert(tuple, insertOpCtx);
+ } catch (Exception e) {
+ }
+ }
+ // btree.printTree();
- appender.reset(frame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb
- .getSize());
+ LOGGER.info("DONE INSERTING");
- tuple.reset(accessor, 0);
+ // ordered scan
+ LOGGER.info("ORDERED SCAN:");
+ ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(leafFrame);
+ RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+ BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH, leafFrame, interiorFrame, null);
+ btree.search(scanCursor, nullPred, searchOpCtx);
- if (i % 1000 == 0) {
- // print("INSERTING " + i + ": " + cmp.printRecord(record, 0) +
- // "\n");
- print("INSERTING " + i + "\n");
- }
+ try {
+ while (scanCursor.hasNext()) {
+ scanCursor.next();
+ ITupleReference frameTuple = scanCursor.getTuple();
+ String rec = cmp.printTuple(frameTuple, recDescSers);
+ LOGGER.info(rec);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ scanCursor.close();
+ }
- try {
- btree.insert(tuple, insertOpCtx);
- } catch (Exception e) {
- // e.printStackTrace();
- }
- }
- // btree.printTree();
+ // range search in ["cbf", "cc7"]
+ LOGGER.info("RANGE SEARCH:");
- System.out.println("DONE INSERTING");
+ ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor(leafFrame);
- // ordered scan
- print("ORDERED SCAN:\n");
- IBTreeCursor scanCursor = new RangeSearchCursor(leafFrame);
- RangePredicate nullPred = new RangePredicate(true, null, null, true,
- true, null, null);
- BTreeOpContext searchOpCtx = btree.createOpContext(BTreeOp.BTO_SEARCH,
- leafFrame, interiorFrame, null);
- btree.search(scanCursor, nullPred, searchOpCtx);
+ // build low and high keys
+ ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
+ DataOutput kdos = ktb.getDataOutput();
- try {
- while (scanCursor.hasNext()) {
- scanCursor.next();
- ITupleReference frameTuple = scanCursor.getTuple();
- String rec = cmp.printTuple(frameTuple, recDescSers);
- print(rec + "\n");
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- scanCursor.close();
- }
+ ISerializerDeserializer[] keyDescSers = { UTF8StringSerializerDeserializer.INSTANCE };
+ RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
+ IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx.getFrameSize(), keyDesc);
+ keyAccessor.reset(frame);
- // range search in ["cbf", cc7"]
- print("RANGE SEARCH:\n");
+ appender.reset(frame, true);
- IBTreeCursor rangeCursor = new RangeSearchCursor(leafFrame);
+ // build and append low key
+ ktb.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize("cbf", kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
- // build low and high keys
- ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
- DataOutput kdos = ktb.getDataOutput();
+ // build and append high key
+ ktb.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize("cc7", kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
- ISerializerDeserializer[] keyDescSers = { UTF8StringSerializerDeserializer.INSTANCE };
- RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
- IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx
- .getFrameSize(), keyDesc);
- keyAccessor.reset(frame);
+ // create tuplereferences for search keys
+ FrameTupleReference lowKey = new FrameTupleReference();
+ lowKey.reset(keyAccessor, 0);
- appender.reset(frame, true);
+ FrameTupleReference highKey = new FrameTupleReference();
+ highKey.reset(keyAccessor, 1);
- // build and append low key
- ktb.reset();
- UTF8StringSerializerDeserializer.INSTANCE.serialize("cbf", kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb
- .getSize());
+ IBinaryComparator[] searchCmps = new IBinaryComparator[1];
+ searchCmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
- // build and append high key
- ktb.reset();
- UTF8StringSerializerDeserializer.INSTANCE.serialize("cc7", kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb
- .getSize());
+ RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
+ btree.search(rangeCursor, rangePred, searchOpCtx);
- // create tuplereferences for search keys
- FrameTupleReference lowKey = new FrameTupleReference();
- lowKey.reset(keyAccessor, 0);
+ try {
+ while (rangeCursor.hasNext()) {
+ rangeCursor.next();
+ ITupleReference frameTuple = rangeCursor.getTuple();
+ String rec = cmp.printTuple(frameTuple, recDescSers);
+ LOGGER.info(rec);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ rangeCursor.close();
+ }
- FrameTupleReference highKey = new FrameTupleReference();
- highKey.reset(keyAccessor, 1);
-
- IBinaryComparator[] searchCmps = new IBinaryComparator[1];
- searchCmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
-
- RangePredicate rangePred = new RangePredicate(true, lowKey, highKey,
- true, true, searchCmp, searchCmp);
- btree.search(rangeCursor, rangePred, searchOpCtx);
-
- try {
- while (rangeCursor.hasNext()) {
- rangeCursor.next();
- ITupleReference frameTuple = rangeCursor.getTuple();
- String rec = cmp.printTuple(frameTuple, recDescSers);
- print(rec + "\n");
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- rangeCursor.close();
- }
-
- btree.close();
- bufferCache.closeFile(fileId);
- bufferCache.close();
-
- print("\n");
- }
-
- // DELETION TEST
- // create a B-tree with one variable-length "key" field and one
- // variable-length "value" field
- // fill B-tree with random values using insertions, then delete entries
- // one-by-one
- // repeat procedure a few times on same B-tree
- @Test
- public void test04() throws Exception {
+ btree.close();
+ bufferCache.closeFile(fileId);
+ bufferCache.close();
+ }
- print("DELETION TEST\n");
+ // DELETION TEST
+ // create a B-tree with one variable-length "key" field and one
+ // variable-length "value" field
+ // fill B-tree with random values using insertions, then delete entries
+ // one-by-one
+ // repeat procedure a few times on same B-tree
+ @Test
+ public void test04() throws Exception {
- TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES);
- IBufferCache bufferCache = TestStorageManagerComponentHolder
- .getBufferCache(ctx);
- IFileMapProvider fmp = TestStorageManagerComponentHolder
- .getFileMapProvider(ctx);
- FileReference file = new FileReference(new File(fileName));
- bufferCache.createFile(file);
- int fileId = fmp.lookupFileId(file);
- bufferCache.openFile(fileId);
+ LOGGER.info("DELETION TEST");
- // declare fields
- int fieldCount = 2;
- ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
- typeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
- typeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
- // declare keys
- int keyFieldCount = 1;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
+ // declare fields
+ int fieldCount = 2;
+ ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ typeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+ typeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
- MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+ // declare keys
+ int keyFieldCount = 1;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
- // SimpleTupleWriterFactory tupleWriterFactory = new
- // SimpleTupleWriterFactory();
- TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(
- typeTraits);
- IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(
- tupleWriterFactory);
- IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(
- tupleWriterFactory);
- IBTreeMetaDataFrameFactory metaFrameFactory = new MetaDataFrameFactory();
+ MultiComparator cmp = new MultiComparator(typeTraits, cmps);
- IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
- IBTreeInteriorFrame interiorFrame = interiorFrameFactory.getFrame();
- IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
+ // SimpleTupleWriterFactory tupleWriterFactory = new SimpleTupleWriterFactory();
+ TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+ ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+ ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
- BTree btree = new BTree(bufferCache, interiorFrameFactory,
- leafFrameFactory, cmp);
- btree.create(fileId, leafFrame, metaFrame);
- btree.open(fileId);
+ IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
+ IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
+ ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
- Random rnd = new Random();
- rnd.setSeed(50);
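+ // free pages are tracked by a linked-list free page manager rooted at metadata page 0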
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
- ByteBuffer frame = ctx.allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
- ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
- DataOutput dos = tb.getDataOutput();
+ BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
+ btree.create(fileId, leafFrame, metaFrame);
+ btree.open(fileId);
- ISerializerDeserializer[] recDescSers = {
- UTF8StringSerializerDeserializer.INSTANCE,
- UTF8StringSerializerDeserializer.INSTANCE };
- RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
- IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx
- .getFrameSize(), recDesc);
- accessor.reset(frame);
- FrameTupleReference tuple = new FrameTupleReference();
+ Random rnd = new Random();
+ rnd.setSeed(50);
- BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT,
- leafFrame, interiorFrame, metaFrame);
- BTreeOpContext deleteOpCtx = btree.createOpContext(BTreeOp.BTO_DELETE,
- leafFrame, interiorFrame, metaFrame);
+ ByteBuffer frame = ctx.allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+ DataOutput dos = tb.getDataOutput();
- int runs = 3;
- for (int run = 0; run < runs; run++) {
+ ISerializerDeserializer[] recDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+ accessor.reset(frame);
+ FrameTupleReference tuple = new FrameTupleReference();
- print("DELETION TEST RUN: " + (run + 1) + "/" + runs + "\n");
+ BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
+ BTreeOpContext deleteOpCtx = btree.createOpContext(IndexOp.DELETE, leafFrame, interiorFrame, metaFrame);
- print("INSERTING INTO BTREE\n");
- int maxLength = 10;
- int ins = 10000;
- String[] f0s = new String[ins];
- String[] f1s = new String[ins];
- int insDone = 0;
- int[] insDoneCmp = new int[ins];
- for (int i = 0; i < ins; i++) {
- String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength
- + 1, rnd);
- String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength
- + 1, rnd);
+ int runs = 3;
+ for (int run = 0; run < runs; run++) {
- f0s[i] = f0;
- f1s[i] = f1;
+ LOGGER.info("DELETION TEST RUN: " + (run + 1) + "/" + runs);
- tb.reset();
- UTF8StringSerializerDeserializer.INSTANCE.serialize(f0, dos);
- tb.addFieldEndOffset();
- UTF8StringSerializerDeserializer.INSTANCE.serialize(f1, dos);
- tb.addFieldEndOffset();
+ LOGGER.info("INSERTING INTO BTREE");
+ int maxLength = 10;
+ int ins = 10000;
+ String[] f0s = new String[ins];
+ String[] f1s = new String[ins];
+ int insDone = 0;
+ int[] insDoneCmp = new int[ins];
+ for (int i = 0; i < ins; i++) {
+ String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+ String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
- appender.reset(frame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0,
- tb.getSize());
+ f0s[i] = f0;
+ f1s[i] = f1;
- tuple.reset(accessor, 0);
+ tb.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
- if (i % 1000 == 0) {
- print("INSERTING " + i + "\n");
- // print("INSERTING " + i + ": " + cmp.printRecord(record,
- // 0) + "\n");
- }
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
- try {
- btree.insert(tuple, insertOpCtx);
- insDone++;
- } catch (BTreeException e) {
- // e.printStackTrace();
- } catch (Exception e) {
- e.printStackTrace();
- }
+ tuple.reset(accessor, 0);
- insDoneCmp[i] = insDone;
- }
- // btree.printTree();
- // btree.printStats();
+ if (i % 1000 == 0) {
+ LOGGER.info("INSERTING " + i);
+ }
- print("DELETING FROM BTREE\n");
- int delDone = 0;
- for (int i = 0; i < ins; i++) {
+ try {
+ btree.insert(tuple, insertOpCtx);
+ insDone++;
+ } catch (TreeIndexException e) {
+ // e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
- tb.reset();
- UTF8StringSerializerDeserializer.INSTANCE
- .serialize(f0s[i], dos);
- tb.addFieldEndOffset();
- UTF8StringSerializerDeserializer.INSTANCE
- .serialize(f1s[i], dos);
- tb.addFieldEndOffset();
+ insDoneCmp[i] = insDone;
+ }
+ // btree.printTree();
+ // btree.printStats();
- appender.reset(frame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0,
- tb.getSize());
+ LOGGER.info("DELETING FROM BTREE");
+ int delDone = 0;
+ for (int i = 0; i < ins; i++) {
- tuple.reset(accessor, 0);
+ tb.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(f0s[i], dos);
+ tb.addFieldEndOffset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(f1s[i], dos);
+ tb.addFieldEndOffset();
- if (i % 1000 == 0) {
- // print("DELETING " + i + ": " +
- // cmp.printRecord(records[i], 0) + "\n");
- print("DELETING " + i + "\n");
- }
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
- try {
- btree.delete(tuple, deleteOpCtx);
- delDone++;
- } catch (BTreeException e) {
- // e.printStackTrace();
- } catch (Exception e) {
- e.printStackTrace();
- }
+ tuple.reset(accessor, 0);
- if (insDoneCmp[i] != delDone) {
- print("INCONSISTENT STATE, ERROR IN DELETION TEST\n");
- print("INSDONECMP: " + insDoneCmp[i] + " " + delDone + "\n");
- break;
- }
- // btree.printTree();
- }
- // btree.printTree(leafFrame, interiorFrame);
+ if (i % 1000 == 0) {
+ LOGGER.info("DELETING " + i);
+ }
- if (insDone != delDone) {
- print("ERROR! INSDONE: " + insDone + " DELDONE: " + delDone);
- break;
- }
- }
+ try {
+ btree.delete(tuple, deleteOpCtx);
+ delDone++;
+ } catch (TreeIndexException e) {
+ // e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
- btree.close();
- bufferCache.closeFile(fileId);
- bufferCache.close();
+ if (insDoneCmp[i] != delDone) {
+ LOGGER.info("INCONSISTENT STATE, ERROR IN DELETION TEST");
+ LOGGER.info("INSDONECMP: " + insDoneCmp[i] + " " + delDone);
+ break;
+ }
+ }
+ // btree.printTree(leafFrame, interiorFrame);
- print("\n");
- }
+ if (insDone != delDone) {
+ LOGGER.info("ERROR! INSDONE: " + insDone + " DELDONE: " + delDone);
+ break;
+ }
+ }
- // BULK LOAD TEST
- // insert 100,000 records in bulk
- // B-tree has a composite key to "simulate" non-unique index creation
- // do range search
- @Test
- public void test05() throws Exception {
+ btree.close();
+ bufferCache.closeFile(fileId);
+ bufferCache.close();
+ }
- print("BULK LOAD TEST\n");
+ // BULK LOAD TEST
+ // insert 100,000 records in bulk
+ // B-tree has a composite key to "simulate" non-unique index creation
+ // do range search
+ @Test
+ public void test05() throws Exception {
- TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES);
- IBufferCache bufferCache = TestStorageManagerComponentHolder
- .getBufferCache(ctx);
- IFileMapProvider fmp = TestStorageManagerComponentHolder
- .getFileMapProvider(ctx);
- FileReference file = new FileReference(new File(fileName));
- bufferCache.createFile(file);
- int fileId = fmp.lookupFileId(file);
- bufferCache.openFile(fileId);
+ LOGGER.info("BULK LOAD TEST");
- // declare fields
- int fieldCount = 3;
- ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
- typeTraits[0] = new TypeTrait(4);
- typeTraits[1] = new TypeTrait(4);
- typeTraits[2] = new TypeTrait(4);
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
- // declare keys
- int keyFieldCount = 2;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = IntegerBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- cmps[1] = IntegerBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
+ // declare fields
+ int fieldCount = 3;
+ ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ typeTraits[0] = new TypeTrait(4);
+ typeTraits[1] = new TypeTrait(4);
+ typeTraits[2] = new TypeTrait(4);
- MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+ // declare keys
+ int keyFieldCount = 2;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
- // SimpleTupleWriterFactory tupleWriterFactory = new
- // SimpleTupleWriterFactory();
- TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(
- typeTraits);
- IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(
- tupleWriterFactory);
- IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(
- tupleWriterFactory);
- IBTreeMetaDataFrameFactory metaFrameFactory = new MetaDataFrameFactory();
+ MultiComparator cmp = new MultiComparator(typeTraits, cmps);
- IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
- IBTreeInteriorFrame interiorFrame = interiorFrameFactory.getFrame();
- IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
+ // SimpleTupleWriterFactory tupleWriterFactory = new SimpleTupleWriterFactory();
+ TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+ ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+ ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
- BTree btree = new BTree(bufferCache, interiorFrameFactory,
- leafFrameFactory, cmp);
- btree.create(fileId, leafFrame, metaFrame);
- btree.open(fileId);
+ ITreeIndexFrame leafFrame = leafFrameFactory.createFrame();
+ ITreeIndexFrame interiorFrame = interiorFrameFactory.createFrame();
+ ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
- Random rnd = new Random();
- rnd.setSeed(50);
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
- ByteBuffer frame = ctx.allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
- ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
- DataOutput dos = tb.getDataOutput();
+ BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
+ btree.create(fileId, leafFrame, metaFrame);
+ btree.open(fileId);
- ISerializerDeserializer[] recDescSers = {
- IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE };
- RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
- IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx
- .getFrameSize(), recDesc);
- accessor.reset(frame);
- FrameTupleReference tuple = new FrameTupleReference();
+ Random rnd = new Random();
+ rnd.setSeed(50);
- BTree.BulkLoadContext bulkLoadCtx = btree.beginBulkLoad(0.7f,
- leafFrame, interiorFrame, metaFrame);
+ ByteBuffer frame = ctx.allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+ DataOutput dos = tb.getDataOutput();
- // generate sorted records
- int ins = 100000;
- print("BULK LOADING " + ins + " RECORDS\n");
- long start = System.currentTimeMillis();
- for (int i = 0; i < ins; i++) {
+ ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+ accessor.reset(frame);
+ FrameTupleReference tuple = new FrameTupleReference();
- tb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(5, dos);
- tb.addFieldEndOffset();
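+ // 0.7f is the target fill factor for pages written during bulk load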
+ IIndexBulkLoadContext bulkLoadCtx = btree.beginBulkLoad(0.7f, leafFrame, interiorFrame, metaFrame);
- appender.reset(frame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb
- .getSize());
+ // generate sorted records
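+ // (bulk load expects tuples to arrive in ascending key order)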
+ int ins = 100000;
+ LOGGER.info("BULK LOADING " + ins + " RECORDS");
+ long start = System.currentTimeMillis();
+ for (int i = 0; i < ins; i++) {
- tuple.reset(accessor, 0);
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(5, dos);
+ tb.addFieldEndOffset();
- btree.bulkLoadAddTuple(bulkLoadCtx, tuple);
- }
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
- btree.endBulkLoad(bulkLoadCtx);
+ tuple.reset(accessor, 0);
- // btree.printTree(leafFrame, interiorFrame);
+ btree.bulkLoadAddTuple(bulkLoadCtx, tuple);
+ }
- long end = System.currentTimeMillis();
- long duration = end - start;
- print("DURATION: " + duration + "\n");
+ btree.endBulkLoad(bulkLoadCtx);
- // range search
- print("RANGE SEARCH:\n");
- IBTreeCursor rangeCursor = new RangeSearchCursor(leafFrame);
+ // btree.printTree(leafFrame, interiorFrame);
- // build low and high keys
- ArrayTupleBuilder ktb = new ArrayTupleBuilder(1);
- DataOutput kdos = ktb.getDataOutput();
+ long end = System.currentTimeMillis();
+ long duration = end - start;
+ LOGGER.info("DURATION: " + duration);
- ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE };
- RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
- IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx
- .getFrameSize(), keyDesc);
- keyAccessor.reset(frame);
+ // range search
+ LOGGER.info("RANGE SEARCH:");
+ ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor((IBTreeLeafFrame) leafFrame);
- appender.reset(frame, true);
+ // build low and high keys
+ ArrayTupleBuilder ktb = new ArrayTupleBuilder(1);
+ DataOutput kdos = ktb.getDataOutput();
- // build and append low key
- ktb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(44444, kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb
- .getSize());
+ ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
+ IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx.getFrameSize(), keyDesc);
+ keyAccessor.reset(frame);
- // build and append high key
- ktb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(44500, kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb
- .getSize());
+ appender.reset(frame, true);
- // create tuplereferences for search keys
- FrameTupleReference lowKey = new FrameTupleReference();
- lowKey.reset(keyAccessor, 0);
+ // build and append low key
+ ktb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(44444, kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
- FrameTupleReference highKey = new FrameTupleReference();
- highKey.reset(keyAccessor, 1);
+ // build and append high key
+ ktb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(44500, kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
- IBinaryComparator[] searchCmps = new IBinaryComparator[1];
- searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
+ // create tuple references for search keys
+ FrameTupleReference lowKey = new FrameTupleReference();
+ lowKey.reset(keyAccessor, 0);
- // TODO: check when searching backwards
- RangePredicate rangePred = new RangePredicate(true, lowKey, highKey,
- true, true, searchCmp, searchCmp);
- BTreeOpContext searchOpCtx = btree.createOpContext(BTreeOp.BTO_SEARCH,
- leafFrame, interiorFrame, null);
- btree.search(rangeCursor, rangePred, searchOpCtx);
+ FrameTupleReference highKey = new FrameTupleReference();
+ highKey.reset(keyAccessor, 1);
- try {
- while (rangeCursor.hasNext()) {
- rangeCursor.next();
- ITupleReference frameTuple = rangeCursor.getTuple();
- String rec = cmp.printTuple(frameTuple, recDescSers);
- print(rec + "\n");
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- rangeCursor.close();
- }
+ IBinaryComparator[] searchCmps = new IBinaryComparator[1];
+ searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
- btree.close();
- bufferCache.closeFile(fileId);
- bufferCache.close();
+ // TODO: check when searching backwards
+ RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
+ BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH, leafFrame, interiorFrame, null);
+ btree.search(rangeCursor, rangePred, searchOpCtx);
- print("\n");
- }
+ try {
+ while (rangeCursor.hasNext()) {
+ rangeCursor.next();
+ ITupleReference frameTuple = rangeCursor.getTuple();
+ String rec = cmp.printTuple(frameTuple, recDescSers);
+ LOGGER.info(rec);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ rangeCursor.close();
+ }
- // TIME-INTERVAL INTERSECTION DEMO FOR EVENT PEOPLE
- // demo for Arjun to show easy support of intersection queries on
- // time-intervals
- @Test
- public void test06() throws Exception {
+ btree.close();
+ bufferCache.closeFile(fileId);
+ bufferCache.close();
+ }
- print("TIME-INTERVAL INTERSECTION DEMO\n");
+ // TIME-INTERVAL INTERSECTION DEMO FOR EVENT PEOPLE
+ // demo for Arjun to show easy support of intersection queries on
+ // time-intervals
+ @Test
+ public void test06() throws Exception {
- TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES);
- IBufferCache bufferCache = TestStorageManagerComponentHolder
- .getBufferCache(ctx);
- IFileMapProvider fmp = TestStorageManagerComponentHolder
- .getFileMapProvider(ctx);
- FileReference file = new FileReference(new File(fileName));
- bufferCache.createFile(file);
- int fileId = fmp.lookupFileId(file);
- bufferCache.openFile(fileId);
+ LOGGER.info("TIME-INTERVAL INTERSECTION DEMO");
- // declare fields
- int fieldCount = 3;
- ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
- typeTraits[0] = new TypeTrait(4);
- typeTraits[1] = new TypeTrait(4);
- typeTraits[2] = new TypeTrait(4);
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
- // declare keys
- int keyFieldCount = 2;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = IntegerBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- cmps[1] = IntegerBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+ // declare fields
+ int fieldCount = 3;
+ ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ typeTraits[0] = new TypeTrait(4);
+ typeTraits[1] = new TypeTrait(4);
+ typeTraits[2] = new TypeTrait(4);
- // SimpleTupleWriterFactory tupleWriterFactory = new
- // SimpleTupleWriterFactory();
- TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(
- typeTraits);
- IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(
- tupleWriterFactory);
- IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(
- tupleWriterFactory);
- IBTreeMetaDataFrameFactory metaFrameFactory = new MetaDataFrameFactory();
+ // declare keys
+ int keyFieldCount = 2;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ MultiComparator cmp = new MultiComparator(typeTraits, cmps);
- IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
- IBTreeInteriorFrame interiorFrame = interiorFrameFactory.getFrame();
- IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
+ // SimpleTupleWriterFactory tupleWriterFactory = new SimpleTupleWriterFactory();
+ TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+ ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+ ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
- BTree btree = new BTree(bufferCache, interiorFrameFactory,
- leafFrameFactory, cmp);
- btree.create(fileId, leafFrame, metaFrame);
- btree.open(fileId);
+ IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
+ IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
+ ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
- Random rnd = new Random();
- rnd.setSeed(50);
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
- ByteBuffer frame = ctx.allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
- ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
- DataOutput dos = tb.getDataOutput();
+ BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
+ btree.create(fileId, leafFrame, metaFrame);
+ btree.open(fileId);
- ISerializerDeserializer[] recDescSers = {
- IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE };
- RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
- IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx
- .getFrameSize(), recDesc);
- accessor.reset(frame);
- FrameTupleReference tuple = new FrameTupleReference();
+ Random rnd = new Random();
+ rnd.setSeed(50);
- long start = System.currentTimeMillis();
+ ByteBuffer frame = ctx.allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+ DataOutput dos = tb.getDataOutput();
- int intervalCount = 10;
- int[][] intervals = new int[intervalCount][2];
+ ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+ accessor.reset(frame);
+ FrameTupleReference tuple = new FrameTupleReference();
- intervals[0][0] = 10;
- intervals[0][1] = 20;
+ long start = System.currentTimeMillis();
- intervals[1][0] = 11;
- intervals[1][1] = 20;
+ int intervalCount = 10;
+ int[][] intervals = new int[intervalCount][2];
- intervals[2][0] = 12;
- intervals[2][1] = 20;
+ intervals[0][0] = 10;
+ intervals[0][1] = 20;
- intervals[3][0] = 13;
- intervals[3][1] = 20;
+ intervals[1][0] = 11;
+ intervals[1][1] = 20;
- intervals[4][0] = 14;
- intervals[4][1] = 20;
+ intervals[2][0] = 12;
+ intervals[2][1] = 20;
- intervals[5][0] = 20;
- intervals[5][1] = 30;
+ intervals[3][0] = 13;
+ intervals[3][1] = 20;
- intervals[6][0] = 20;
- intervals[6][1] = 31;
+ intervals[4][0] = 14;
+ intervals[4][1] = 20;
- intervals[7][0] = 20;
- intervals[7][1] = 32;
+ intervals[5][0] = 20;
+ intervals[5][1] = 30;
- intervals[8][0] = 20;
- intervals[8][1] = 33;
+ intervals[6][0] = 20;
+ intervals[6][1] = 31;
- intervals[9][0] = 20;
- intervals[9][1] = 35;
+ intervals[7][0] = 20;
+ intervals[7][1] = 32;
- BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT,
- leafFrame, interiorFrame, metaFrame);
+ intervals[8][0] = 20;
+ intervals[8][1] = 33;
- // int exceptionCount = 0;
- for (int i = 0; i < intervalCount; i++) {
- int f0 = intervals[i][0];
- int f1 = intervals[i][1];
- int f2 = rnd.nextInt() % 100;
+ intervals[9][0] = 20;
+ intervals[9][1] = 35;
- tb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(f2, dos);
- tb.addFieldEndOffset();
+ BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
- appender.reset(frame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb
- .getSize());
+ // int exceptionCount = 0;
+ for (int i = 0; i < intervalCount; i++) {
+ int f0 = intervals[i][0];
+ int f1 = intervals[i][1];
+ int f2 = rnd.nextInt() % 100;
- tuple.reset(accessor, 0);
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f2, dos);
+ tb.addFieldEndOffset();
- // print("INSERTING " + i + " : " + f0 + " " + f1 + "\n");
- print("INSERTING " + i + "\n");
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
- try {
- btree.insert(tuple, insertOpCtx);
- } catch (Exception e) {
- // e.printStackTrace();
- }
- }
- // btree.printTree(leafFrame, interiorFrame);
- // btree.printStats();
+ tuple.reset(accessor, 0);
- long end = System.currentTimeMillis();
- long duration = end - start;
- print("DURATION: " + duration + "\n");
+ LOGGER.info("INSERTING " + i);
- // try a simple index scan
+ try {
+ btree.insert(tuple, insertOpCtx);
+ } catch (Exception e) {
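+ // failed inserts are ignored in this demo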
+ }
+ }
+ // btree.printTree(leafFrame, interiorFrame);
+ // btree.printStats();
- print("ORDERED SCAN:\n");
- IBTreeCursor scanCursor = new RangeSearchCursor(leafFrame);
- RangePredicate nullPred = new RangePredicate(true, null, null, true,
- true, null, null);
- BTreeOpContext searchOpCtx = btree.createOpContext(BTreeOp.BTO_SEARCH,
- leafFrame, interiorFrame, null);
- btree.search(scanCursor, nullPred, searchOpCtx);
+ long end = System.currentTimeMillis();
+ long duration = end - start;
+ LOGGER.info("DURATION: " + duration);
- try {
- while (scanCursor.hasNext()) {
- scanCursor.next();
- ITupleReference frameTuple = scanCursor.getTuple();
- String rec = cmp.printTuple(frameTuple, recDescSers);
- print(rec + "\n");
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- scanCursor.close();
- }
+ // try a simple index scan
- // try a range search
- print("RANGE SEARCH:\n");
- IBTreeCursor rangeCursor = new RangeSearchCursor(leafFrame);
+ LOGGER.info("ORDERED SCAN:");
+ ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(leafFrame);
+ RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+ BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH, leafFrame, interiorFrame, null);
+ btree.search(scanCursor, nullPred, searchOpCtx);
- // build low and high keys
- ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
- DataOutput kdos = ktb.getDataOutput();
+ try {
+ while (scanCursor.hasNext()) {
+ scanCursor.next();
+ ITupleReference frameTuple = scanCursor.getTuple();
+ String rec = cmp.printTuple(frameTuple, recDescSers);
+ LOGGER.info(rec);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ scanCursor.close();
+ }
- ISerializerDeserializer[] keyDescSers = {
- IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE };
- RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
- IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx
- .getFrameSize(), keyDesc);
- keyAccessor.reset(frame);
+ // try a range search
+ LOGGER.info("RANGE SEARCH:");
+ ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor(leafFrame);
- appender.reset(frame, true);
+ // build low and high keys
+ ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
+ DataOutput kdos = ktb.getDataOutput();
- // build and append low key
- ktb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(12, kdos);
- ktb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(12, kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb
- .getSize());
+ ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
+ IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx.getFrameSize(), keyDesc);
+ keyAccessor.reset(frame);
- // build and append high key
- ktb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(19, kdos);
- ktb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(19, kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb
- .getSize());
+ appender.reset(frame, true);
- // create tuplereferences for search keys
- FrameTupleReference lowKey = new FrameTupleReference();
- lowKey.reset(keyAccessor, 0);
+ // build and append low key
+ ktb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(12, kdos);
+ ktb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(12, kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
- FrameTupleReference highKey = new FrameTupleReference();
- highKey.reset(keyAccessor, 1);
+ // build and append high key
+ ktb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(19, kdos);
+ ktb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(19, kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
- IBinaryComparator[] searchCmps = new IBinaryComparator[2];
- searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- searchCmps[1] = IntegerBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
+ // create tuple references for search keys
+ FrameTupleReference lowKey = new FrameTupleReference();
+ lowKey.reset(keyAccessor, 0);
- // print("INDEX RANGE SEARCH ON: " + cmp.printKey(lowKey, 0) + " " +
- // cmp.printKey(highKey, 0) + "\n");
+ FrameTupleReference highKey = new FrameTupleReference();
+ highKey.reset(keyAccessor, 1);
- RangePredicate rangePred = new RangePredicate(true, lowKey, highKey,
- true, true, searchCmp, searchCmp);
- btree.search(rangeCursor, rangePred, searchOpCtx);
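+ // the search comparator covers both key fields (interval start and end)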
+ IBinaryComparator[] searchCmps = new IBinaryComparator[2];
+ searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ searchCmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
- try {
- while (rangeCursor.hasNext()) {
- rangeCursor.next();
- ITupleReference frameTuple = rangeCursor.getTuple();
- String rec = cmp.printTuple(frameTuple, recDescSers);
- print(rec + "\n");
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- rangeCursor.close();
- }
+ RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
+ btree.search(rangeCursor, rangePred, searchOpCtx);
- btree.close();
- bufferCache.closeFile(fileId);
- bufferCache.close();
+ try {
+ while (rangeCursor.hasNext()) {
+ rangeCursor.next();
+ ITupleReference frameTuple = rangeCursor.getTuple();
+ String rec = cmp.printTuple(frameTuple, recDescSers);
+ LOGGER.info(rec);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ rangeCursor.close();
+ }
- print("\n");
- }
+ btree.close();
+ bufferCache.closeFile(fileId);
+ bufferCache.close();
+ }
- public static String randomString(int length, Random random) {
- String s = Long.toHexString(Double
- .doubleToLongBits(random.nextDouble()));
- StringBuilder strBuilder = new StringBuilder();
- for (int i = 0; i < s.length() && i < length; i++) {
- strBuilder
- .append(s.charAt(Math.abs(random.nextInt()) % s.length()));
- }
- return strBuilder.toString();
- }
+ public static String randomString(int length, Random random) {
+ String s = Long.toHexString(Double.doubleToLongBits(random.nextDouble()));
+ StringBuilder strBuilder = new StringBuilder();
+ for (int i = 0; i < s.length() && i < length; i++) {
+ strBuilder.append(s.charAt(Math.abs(random.nextInt()) % s.length()));
+ }
+ return strBuilder.toString();
+ }
}
\ No newline at end of file
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/RangeSearchCursorTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/RangeSearchCursorTest.java
index 12371be..1a3b219 100644
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/RangeSearchCursorTest.java
+++ b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/RangeSearchCursorTest.java
@@ -45,27 +45,27 @@
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeCursor;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeMetaDataFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.frames.FieldPrefixNSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.frames.MetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.frames.NSMInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.frames.NSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeFieldPrefixNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeException;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOp;
import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext;
-import edu.uci.ics.hyracks.storage.am.btree.impls.MultiComparator;
import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangeSearchCursor;
-import edu.uci.ics.hyracks.storage.am.btree.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
import edu.uci.ics.hyracks.test.support.TestUtils;
@@ -73,18 +73,8 @@
public class RangeSearchCursorTest extends AbstractBTreeTest {
private static final int PAGE_SIZE = 256;
private static final int NUM_PAGES = 10;
- private static final int HYRACKS_FRAME_SIZE = 128;
-
- public class BufferAllocator implements ICacheMemoryAllocator {
- @Override
- public ByteBuffer[] allocate(int pageSize, int numPages) {
- ByteBuffer[] buffers = new ByteBuffer[numPages];
- for (int i = 0; i < numPages; ++i) {
- buffers[i] = ByteBuffer.allocate(pageSize);
- }
- return buffers;
- }
- }
+ private static final int MAX_OPEN_FILES = 10;
+ private static final int HYRACKS_FRAME_SIZE = 128;
// declare fields
int fieldCount = 2;
@@ -92,15 +82,15 @@
TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(
typeTraits);
- IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(
+ ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(
tupleWriterFactory);
- IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(
+ ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(
tupleWriterFactory);
- IBTreeMetaDataFrameFactory metaFrameFactory = new MetaDataFrameFactory();
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
- IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
- IBTreeInteriorFrame interiorFrame = interiorFrameFactory.getFrame();
- IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
+ IBTreeLeafFrame leafFrame = (IBTreeLeafFrame)leafFrameFactory.createFrame();
+ IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame)interiorFrameFactory.createFrame();
+ ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
IHyracksStageletContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
ByteBuffer frame = ctx.allocateFrame();
@@ -126,9 +116,9 @@
@Test
public void uniqueIndexTest() throws Exception {
- System.out.println("TESTING RANGE SEARCH CURSOR ON UNIQUE INDEX");
+ LOGGER.info("TESTING RANGE SEARCH CURSOR ON UNIQUE INDEX");
- TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES);
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
IBufferCache bufferCache = TestStorageManagerComponentHolder
.getBufferCache(ctx);
IFileMapProvider fmp = TestStorageManagerComponentHolder
@@ -146,7 +136,9 @@
MultiComparator cmp = new MultiComparator(typeTraits, cmps);
- BTree btree = new BTree(bufferCache, interiorFrameFactory,
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
+
+ BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory,
leafFrameFactory, cmp);
btree.create(fileId, leafFrame, metaFrame);
btree.open(fileId);
@@ -154,7 +146,7 @@
ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
DataOutput dos = tb.getDataOutput();
- BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT,
+ BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT,
leafFrame, interiorFrame, metaFrame);
// generate keys
@@ -199,8 +191,6 @@
int minSearchKey = -100;
int maxSearchKey = 100;
- // System.out.println("STARTING SEARCH TESTS");
-
// forward searches
performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey,
maxSearchKey, true, true, true, false);
@@ -229,9 +219,9 @@
@Test
public void nonUniqueIndexTest() throws Exception {
- System.out.println("TESTING RANGE SEARCH CURSOR ON NONUNIQUE INDEX");
+ LOGGER.info("TESTING RANGE SEARCH CURSOR ON NONUNIQUE INDEX");
- TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES);
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
IBufferCache bufferCache = TestStorageManagerComponentHolder
.getBufferCache(ctx);
IFileMapProvider fmp = TestStorageManagerComponentHolder
@@ -251,7 +241,9 @@
MultiComparator cmp = new MultiComparator(typeTraits, cmps);
- BTree btree = new BTree(bufferCache, interiorFrameFactory,
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
+
+ BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory,
leafFrameFactory, cmp);
btree.create(fileId, leafFrame, metaFrame);
btree.open(fileId);
@@ -259,7 +251,7 @@
ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
DataOutput dos = tb.getDataOutput();
- BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT,
+ BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT,
leafFrame, interiorFrame, metaFrame);
// generate keys
@@ -301,8 +293,6 @@
int minSearchKey = -100;
int maxSearchKey = 100;
- // System.out.println("STARTING SEARCH TESTS");
-
// forward searches
performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey,
maxSearchKey, true, true, true, false);
@@ -331,14 +321,13 @@
@Test
public void nonUniqueFieldPrefixIndexTest() throws Exception {
- System.out
- .println("TESTING RANGE SEARCH CURSOR ON NONUNIQUE FIELD-PREFIX COMPRESSED INDEX");
+ LOGGER.info("TESTING RANGE SEARCH CURSOR ON NONUNIQUE FIELD-PREFIX COMPRESSED INDEX");
- IBTreeLeafFrameFactory leafFrameFactory = new FieldPrefixNSMLeafFrameFactory(
+ ITreeIndexFrameFactory leafFrameFactory = new BTreeFieldPrefixNSMLeafFrameFactory(
tupleWriterFactory);
- IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
+ IBTreeLeafFrame leafFrame = (IBTreeLeafFrame)leafFrameFactory.createFrame();
- TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES);
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
IBufferCache bufferCache = TestStorageManagerComponentHolder
.getBufferCache(ctx);
IFileMapProvider fmp = TestStorageManagerComponentHolder
@@ -358,7 +347,9 @@
MultiComparator cmp = new MultiComparator(typeTraits, cmps);
- BTree btree = new BTree(bufferCache, interiorFrameFactory,
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
+
+ BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory,
leafFrameFactory, cmp);
btree.create(fileId, leafFrame, metaFrame);
btree.open(fileId);
@@ -366,7 +357,7 @@
ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
DataOutput dos = tb.getDataOutput();
- BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT,
+ BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT,
leafFrame, interiorFrame, metaFrame);
// generate keys
@@ -408,8 +399,6 @@
int minSearchKey = -100;
int maxSearchKey = 100;
- // System.out.println("STARTING SEARCH TESTS");
-
// forward searches
performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey,
maxSearchKey, true, true, true, false);
@@ -534,21 +523,19 @@
for (int i = minKey; i < maxKey; i++) {
for (int j = minKey; j < maxKey; j++) {
- // if(i != -100 || j != 1) continue;
-
results.clear();
expectedResults.clear();
int lowKey = i;
int highKey = j;
- IBTreeCursor rangeCursor = new RangeSearchCursor(leafFrame);
+ ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor(leafFrame);
RangePredicate rangePred = createRangePredicate(lowKey,
highKey, isForward, lowKeyInclusive, highKeyInclusive,
btree.getMultiComparator(), btree.getMultiComparator()
.getTypeTraits());
BTreeOpContext searchOpCtx = btree.createOpContext(
- BTreeOp.BTO_SEARCH, leafFrame, interiorFrame, null);
+ IndexOp.SEARCH, leafFrame, interiorFrame, null);
btree.search(rangeCursor, rangePred, searchOpCtx);
try {
@@ -587,29 +574,31 @@
else
u = ')';
- System.out.println("RANGE: " + l + " " + lowKey + " , "
+ LOGGER.info("RANGE: " + l + " " + lowKey + " , "
+ highKey + " " + u);
- for (Integer r : expectedResults)
- System.out.print(r + " ");
- System.out.println();
+ StringBuilder strBuilder = new StringBuilder();
+ for (Integer r : expectedResults) {
+ strBuilder.append(r + " ");
+ }
+ LOGGER.info(strBuilder.toString());
}
}
if (results.size() == expectedResults.size()) {
for (int k = 0; k < results.size(); k++) {
if (!results.get(k).equals(expectedResults.get(k))) {
- System.out.println("DIFFERENT RESULTS AT: i=" + i
+ LOGGER.info("DIFFERENT RESULTS AT: i=" + i
+ " j=" + j + " k=" + k);
- System.out.println(results.get(k) + " "
+ LOGGER.info(results.get(k) + " "
+ expectedResults.get(k));
return false;
}
}
} else {
- System.out.println("UNEQUAL NUMBER OF RESULTS AT: i=" + i
+ LOGGER.info("UNEQUAL NUMBER OF RESULTS AT: i=" + i
+ " j=" + j);
- System.out.println("RESULTS: " + results.size());
- System.out.println("EXPECTED RESULTS: "
+ LOGGER.info("RESULTS: " + results.size());
+ LOGGER.info("EXPECTED RESULTS: "
+ expectedResults.size());
return false;
}
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StorageManagerTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StorageManagerTest.java
index 430a33c..1a510e6 100644
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StorageManagerTest.java
+++ b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StorageManagerTest.java
@@ -25,6 +25,7 @@
import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.storage.am.btree.AbstractBTreeTest;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
@@ -36,6 +37,8 @@
public class StorageManagerTest extends AbstractBTreeTest {
private static final int PAGE_SIZE = 256;
private static final int NUM_PAGES = 10;
+ private static final int MAX_OPEN_FILES = 10;
+ private static final int HYRACKS_FRAME_SIZE = 128;
private IHyracksStageletContext ctx = TestUtils.create(32768);
public class PinnedLatchedPage {
@@ -87,7 +90,7 @@
private void pinRandomPage() {
int pageId = Math.abs(rnd.nextInt() % maxPages);
- System.out.println(workerId + " PINNING PAGE: " + pageId);
+ LOGGER.info(workerId + " PINNING PAGE: " + pageId);
try {
ICachedPage page = bufferCache.pin(BufferedFileHandle
@@ -102,14 +105,14 @@
break;
case FTA_READONLY: {
- System.out.println(workerId + " S LATCHING: " + pageId);
+ LOGGER.info(workerId + " S LATCHING: " + pageId);
page.acquireReadLatch();
latch = LatchType.LATCH_S;
}
break;
case FTA_WRITEONLY: {
- System.out.println(workerId + " X LATCHING: " + pageId);
+ LOGGER.info(workerId + " X LATCHING: " + pageId);
page.acquireWriteLatch();
latch = LatchType.LATCH_X;
}
@@ -117,11 +120,11 @@
case FTA_MIXED: {
if (rnd.nextInt() % 2 == 0) {
- System.out.println(workerId + " S LATCHING: " + pageId);
+ LOGGER.info(workerId + " S LATCHING: " + pageId);
page.acquireReadLatch();
latch = LatchType.LATCH_S;
} else {
- System.out.println(workerId + " X LATCHING: " + pageId);
+ LOGGER.info(workerId + " X LATCHING: " + pageId);
page.acquireWriteLatch();
latch = LatchType.LATCH_X;
}
@@ -145,16 +148,16 @@
if (plPage.latch != null) {
if (plPage.latch == LatchType.LATCH_S) {
- System.out.println(workerId + " S UNLATCHING: "
+ LOGGER.info(workerId + " S UNLATCHING: "
+ plPage.pageId);
plPage.page.releaseReadLatch();
} else {
- System.out.println(workerId + " X UNLATCHING: "
+ LOGGER.info(workerId + " X UNLATCHING: "
+ plPage.pageId);
plPage.page.releaseWriteLatch();
}
}
- System.out.println(workerId + " UNPINNING PAGE: "
+ LOGGER.info(workerId + " UNPINNING PAGE: "
+ plPage.pageId);
bufferCache.unpin(plPage.page);
@@ -165,7 +168,7 @@
}
private void openFile() {
- System.out.println(workerId + " OPENING FILE: " + fileId);
+ LOGGER.info(workerId + " OPENING FILE: " + fileId);
try {
bufferCache.openFile(fileId);
fileIsOpen = true;
@@ -175,7 +178,7 @@
}
private void closeFile() {
- System.out.println(workerId + " CLOSING FILE: " + fileId);
+ LOGGER.info(workerId + " CLOSING FILE: " + fileId);
try {
bufferCache.closeFile(fileId);
fileIsOpen = false;
@@ -192,7 +195,7 @@
while (loopCount < maxLoopCount) {
loopCount++;
- System.out.println(workerId + " LOOP: " + loopCount + "/"
+ LOGGER.info(workerId + " LOOP: " + loopCount + "/"
+ maxLoopCount);
if (fileIsOpen) {
@@ -247,7 +250,7 @@
@Test
public void oneThreadOneFileTest() throws Exception {
- TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES);
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
IBufferCache bufferCache = TestStorageManagerComponentHolder
.getBufferCache(ctx);
IFileMapProvider fmp = TestStorageManagerComponentHolder
diff --git a/hyracks-tests/hyracks-storage-am-invertedindex-test/.settings/org.eclipse.jdt.core.prefs b/hyracks-tests/hyracks-storage-am-invertedindex-test/.settings/org.eclipse.jdt.core.prefs
index 3cd389e..375e12e 100644
--- a/hyracks-tests/hyracks-storage-am-invertedindex-test/.settings/org.eclipse.jdt.core.prefs
+++ b/hyracks-tests/hyracks-storage-am-invertedindex-test/.settings/org.eclipse.jdt.core.prefs
@@ -1,4 +1,4 @@
-#Thu Jan 06 11:27:16 PST 2011
+#Fri May 20 19:34:07 PDT 2011
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.compliance=1.6
diff --git a/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/AbstractInvIndexSearchTest.java b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/AbstractInvIndexSearchTest.java
new file mode 100644
index 0000000..b42001d
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/AbstractInvIndexSearchTest.java
@@ -0,0 +1,224 @@
+package edu.uci.ics.hyracks.storage.am.invertedindex;
+
+import java.io.DataOutput;
+import java.io.File;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Random;
+
+import org.junit.After;
+import org.junit.Before;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.TypeTrait;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.UTF8StringBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexResultCursor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndex;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.TOccurrenceSearcher;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizer;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.ITokenFactory;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestUtils;
+
+public abstract class AbstractInvIndexSearchTest extends AbstractInvIndexTest {
+ protected final int PAGE_SIZE = 32768;
+ protected final int NUM_PAGES = 100;
+ protected final int MAX_OPEN_FILES = 10;
+ protected final int HYRACKS_FRAME_SIZE = 32768;
+ protected IHyracksStageletContext stageletCtx = TestUtils
+ .create(HYRACKS_FRAME_SIZE);
+
+ protected IBufferCache bufferCache;
+ protected IFileMapProvider fmp;
+
+ // --- BTREE ---
+
+ // create file refs
+ protected FileReference btreeFile = new FileReference(new File(
+ btreeFileName));
+ protected int btreeFileId;
+
+ // declare btree fields
+ protected int fieldCount = 5;
+ protected ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+
+ // declare btree keys
+ protected int btreeKeyFieldCount = 1;
+ protected IBinaryComparator[] btreeBinCmps = new IBinaryComparator[btreeKeyFieldCount];
+ protected MultiComparator btreeCmp = new MultiComparator(typeTraits,
+ btreeBinCmps);
+
+ // btree frame factories
+ protected TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(
+ typeTraits);
+ protected ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(
+ tupleWriterFactory);
+ protected ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(
+ tupleWriterFactory);
+ protected ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+
+ // btree frames
+ protected ITreeIndexFrame leafFrame = leafFrameFactory.createFrame();
+ protected ITreeIndexMetaDataFrame metaFrame = metaFrameFactory
+ .createFrame();
+
+ protected IFreePageManager freePageManager;
+
+ protected BTree btree;
+
+ // --- INVERTED INDEX ---
+
+ protected FileReference invListsFile = new FileReference(new File(
+ invListsFileName));
+ protected int invListsFileId;
+
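+ // Inverted-list elements are single 4-byte integers (the record ids).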
+ protected int invListFields = 1;
+ protected ITypeTrait[] invListTypeTraits = new ITypeTrait[invListFields];
+
+ protected int invListKeys = 1;
+ protected IBinaryComparator[] invListBinCmps = new IBinaryComparator[invListKeys];
+ protected MultiComparator invListCmp = new MultiComparator(
+ invListTypeTraits, invListBinCmps);
+
+ protected InvertedIndex invIndex;
+
+ protected Random rnd = new Random();
+
+ protected ByteBuffer frame = stageletCtx.allocateFrame();
+ protected FrameTupleAppender appender = new FrameTupleAppender(
+ stageletCtx.getFrameSize());
+ protected ArrayTupleBuilder tb = new ArrayTupleBuilder(2);
+ protected DataOutput dos = tb.getDataOutput();
+
+ protected ISerializerDeserializer[] insertSerde = {
+ UTF8StringSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE };
+ protected RecordDescriptor insertRecDesc = new RecordDescriptor(insertSerde);
+ protected IFrameTupleAccessor accessor = new FrameTupleAccessor(
+ stageletCtx.getFrameSize(), insertRecDesc);
+
+ protected FrameTupleReference tuple = new FrameTupleReference();
+
+ protected ArrayList<ArrayList<Integer>> checkInvLists = new ArrayList<ArrayList<Integer>>();
+
+ protected int maxId = 1000000;
+ // protected int maxId = 1000;
+ protected int[] scanCountArray = new int[maxId];
+ protected ArrayList<Integer> expectedResults = new ArrayList<Integer>();
+
+ protected ISerializerDeserializer[] querySerde = { UTF8StringSerializerDeserializer.INSTANCE };
+ protected RecordDescriptor queryRecDesc = new RecordDescriptor(querySerde);
+
+ protected FrameTupleAppender queryAppender = new FrameTupleAppender(
+ stageletCtx.getFrameSize());
+ protected ArrayTupleBuilder queryTb = new ArrayTupleBuilder(
+ querySerde.length);
+ protected DataOutput queryDos = queryTb.getDataOutput();
+
+ protected IFrameTupleAccessor queryAccessor = new FrameTupleAccessor(
+ stageletCtx.getFrameSize(), queryRecDesc);
+ protected FrameTupleReference queryTuple = new FrameTupleReference();
+
+ protected ITokenFactory tokenFactory;
+ protected IBinaryTokenizer tokenizer;
+
+ protected TOccurrenceSearcher searcher;
+ protected IInvertedIndexResultCursor resultCursor;
+
+ /**
+ * Initializes the buffer cache, btree, and inverted index. Subclasses
+ * generate data and bulk load the inverted index in their own start().
+ *
+ */
+ @Before
+ public void start() throws Exception {
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES,
+ MAX_OPEN_FILES);
+ bufferCache = TestStorageManagerComponentHolder
+ .getBufferCache(stageletCtx);
+ fmp = TestStorageManagerComponentHolder.getFileMapProvider(stageletCtx);
+
+ // --- BTREE ---
+
+ bufferCache.createFile(btreeFile);
+ btreeFileId = fmp.lookupFileId(btreeFile);
+ bufferCache.openFile(btreeFileId);
+
+ // token (key)
+ typeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+ // startPageId
+ typeTraits[1] = new TypeTrait(4);
+ // endPageId
+ typeTraits[2] = new TypeTrait(4);
+ // startOff
+ typeTraits[3] = new TypeTrait(4);
+ // numElements
+ typeTraits[4] = new TypeTrait(4);
+
+ btreeBinCmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE
+ .createBinaryComparator();
+
+ freePageManager = new LinkedListFreePageManager(bufferCache,
+ btreeFileId, 0, metaFrameFactory);
+
+ btree = new BTree(bufferCache, freePageManager, interiorFrameFactory,
+ leafFrameFactory, btreeCmp);
+ btree.create(btreeFileId, leafFrame, metaFrame);
+ btree.open(btreeFileId);
+
+ // --- INVERTED INDEX ---
+
+ bufferCache.createFile(invListsFile);
+ invListsFileId = fmp.lookupFileId(invListsFile);
+ bufferCache.openFile(invListsFileId);
+
+ invListTypeTraits[0] = new TypeTrait(4);
+ invListBinCmps[0] = IntegerBinaryComparatorFactory.INSTANCE
+ .createBinaryComparator();
+
+ invIndex = new InvertedIndex(bufferCache, btree, invListCmp);
+ invIndex.open(invListsFileId);
+
+ rnd.setSeed(50);
+
+ accessor.reset(frame);
+ queryAccessor.reset(frame);
+ }
+
+ @After
+ public void deinit() throws HyracksDataException {
+ AbstractInvIndexTest.tearDown();
+ btree.close();
+ invIndex.close();
+ bufferCache.closeFile(btreeFileId);
+ bufferCache.closeFile(invListsFileId);
+ bufferCache.close();
+ }
+}
diff --git a/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/AbstractInvIndexTest.java b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/AbstractInvIndexTest.java
new file mode 100644
index 0000000..cc8ab15
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/AbstractInvIndexTest.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex;
+
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.logging.Logger;
+
+public abstract class AbstractInvIndexTest {
+
+ protected static final Logger LOGGER = Logger
+ .getLogger(AbstractInvIndexTest.class.getName());
+
+ protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat(
+ "ddMMyy-hhmmssSS");
+ protected final static String tmpDir = System.getProperty("java.io.tmpdir");
+ protected final static String sep = System.getProperty("file.separator");
+ protected final static String baseFileName = tmpDir + sep
+ + simpleDateFormat.format(new Date());
+ protected final static String btreeFileName = baseFileName + "btree";
+ protected final static String invListsFileName = baseFileName + "invlists";
+
+ public static void tearDown() {
+ File btreeFile = new File(btreeFileName);
+ btreeFile.deleteOnExit();
+ File invListsFile = new File(invListsFileName);
+ invListsFile.deleteOnExit();
+ }
+}
diff --git a/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/BulkLoadTest.java b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/BulkLoadTest.java
new file mode 100644
index 0000000..fb0b7b9
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/BulkLoadTest.java
@@ -0,0 +1,345 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex;
+
+import java.io.DataOutput;
+import java.io.File;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Random;
+
+import junit.framework.Assert;
+
+import org.junit.AfterClass;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.TypeTrait;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.UTF8StringBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListBuilder;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListCursor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.FixedSizeElementInvertedListBuilder;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.FixedSizeElementInvertedListCursor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndex;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestUtils;
+
+public class BulkLoadTest extends AbstractInvIndexTest {
+
+ private static final int PAGE_SIZE = 32768;
+ private static final int NUM_PAGES = 100;
+ private static final int MAX_OPEN_FILES = 10;
+ private static final int HYRACKS_FRAME_SIZE = 32768;
+ private IHyracksStageletContext stageletCtx = TestUtils
+ .create(HYRACKS_FRAME_SIZE);
+
+ /**
+ * This test generates a list of <word-token, id> pairs which are pre-sorted
+ * on the token. These pairs form the input to an inverted-index bulk load.
+ * The contents of the inverted lists are verified against the generated
+ * data.
+ *
+ */
+ @Test
+ public void singleFieldPayloadTest() throws Exception {
+
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES,
+ MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder
+ .getBufferCache(stageletCtx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder
+ .getFileMapProvider(stageletCtx);
+
+ // create file refs
+ FileReference btreeFile = new FileReference(new File(btreeFileName));
+ bufferCache.createFile(btreeFile);
+ int btreeFileId = fmp.lookupFileId(btreeFile);
+ bufferCache.openFile(btreeFileId);
+
+ FileReference invListsFile = new FileReference(new File(
+ invListsFileName));
+ bufferCache.createFile(invListsFile);
+ int invListsFileId = fmp.lookupFileId(invListsFile);
+ bufferCache.openFile(invListsFileId);
+
+ // declare btree fields
+ int fieldCount = 5;
+ ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ // token (key)
+ typeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+ // startPageId
+ typeTraits[1] = new TypeTrait(4);
+ // endPageId
+ typeTraits[2] = new TypeTrait(4);
+ // startOff
+ typeTraits[3] = new TypeTrait(4);
+ // numElements
+ typeTraits[4] = new TypeTrait(4);
+
+ // declare btree keys
+ int keyFieldCount = 1;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE
+ .createBinaryComparator();
+
+ MultiComparator btreeCmp = new MultiComparator(typeTraits, cmps);
+
+ TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(
+ typeTraits);
+ ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(
+ tupleWriterFactory);
+ ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(
+ tupleWriterFactory);
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+
+ ITreeIndexFrame leafFrame = leafFrameFactory.createFrame();
+ ITreeIndexFrame interiorFrame = interiorFrameFactory.createFrame();
+ ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
+
+ IFreePageManager freePageManager = new LinkedListFreePageManager(
+ bufferCache, btreeFileId, 0, metaFrameFactory);
+
+ BTree btree = new BTree(bufferCache, freePageManager,
+ interiorFrameFactory, leafFrameFactory, btreeCmp);
+ btree.create(btreeFileId, leafFrame, metaFrame);
+ btree.open(btreeFileId);
+
+ int invListFields = 1;
+ ITypeTrait[] invListTypeTraits = new ITypeTrait[invListFields];
+ invListTypeTraits[0] = new TypeTrait(4);
+
+ int invListKeys = 1;
+ IBinaryComparator[] invListBinCmps = new IBinaryComparator[invListKeys];
+ invListBinCmps[0] = IntegerBinaryComparatorFactory.INSTANCE
+ .createBinaryComparator();
+
+ MultiComparator invListCmp = new MultiComparator(invListTypeTraits,
+ invListBinCmps);
+
+ InvertedIndex invIndex = new InvertedIndex(bufferCache, btree,
+ invListCmp);
+ invIndex.open(invListsFileId);
+
+ Random rnd = new Random();
+ rnd.setSeed(50);
+
+ ByteBuffer frame = stageletCtx.allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(
+ stageletCtx.getFrameSize());
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(2);
+ DataOutput dos = tb.getDataOutput();
+
+ ISerializerDeserializer[] insertSerde = {
+ UTF8StringSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor insertRecDesc = new RecordDescriptor(insertSerde);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(
+ stageletCtx.getFrameSize(), insertRecDesc);
+ accessor.reset(frame);
+ FrameTupleReference tuple = new FrameTupleReference();
+
+ List<String> tokens = new ArrayList<String>();
+ tokens.add("compilers");
+ tokens.add("computer");
+ tokens.add("databases");
+ tokens.add("fast");
+ tokens.add("hyracks");
+ tokens.add("major");
+ tokens.add("science");
+ tokens.add("systems");
+ tokens.add("university");
+
+ ArrayList<ArrayList<Integer>> checkListElements = new ArrayList<ArrayList<Integer>>();
+ for (int i = 0; i < tokens.size(); i++) {
+ checkListElements.add(new ArrayList<Integer>());
+ }
+
+ int maxId = 1000000;
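+ // The probability of adding an id to token i's list is 1/addProb, and
+ // addProb grows with i, so earlier tokens get longer (length-skewed) lists.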
+ int addProb = 0;
+ int addProbStep = 10;
+
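+ // Bulk-load tuples are <token, id> pairs; the builder writes the ids as
+ // fixed-size (4-byte) inverted-list elements.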
+ IInvertedListBuilder invListBuilder = new FixedSizeElementInvertedListBuilder(
+ invListTypeTraits);
+ InvertedIndex.BulkLoadContext ctx = invIndex.beginBulkLoad(
+ invListBuilder, HYRACKS_FRAME_SIZE, BTree.DEFAULT_FILL_FACTOR);
+
+ int totalElements = 0;
+ for (int i = 0; i < tokens.size(); i++) {
+
+ addProb += addProbStep * (i + 1);
+ for (int j = 0; j < maxId; j++) {
+ if ((Math.abs(rnd.nextInt()) % addProb) == 0) {
+
+ totalElements++;
+
+ tb.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(
+ tokens.get(i), dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(j, dos);
+ tb.addFieldEndOffset();
+
+ checkListElements.get(i).add(j);
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(),
+ 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ try {
+ invIndex.bulkLoadAddTuple(ctx, tuple);
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ }
+ invIndex.endBulkLoad(ctx);
+
+ // ------- START VERIFICATION -----------
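+ // For each token, look up its btree entry to locate the inverted list,
+ // then scan the list and compare it element-by-element against
+ // checkListElements.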
+
+ ITreeIndexCursor btreeCursor = new BTreeRangeSearchCursor(
+ (IBTreeLeafFrame) leafFrame);
+ FrameTupleReference searchKey = new FrameTupleReference();
+ RangePredicate btreePred = new RangePredicate(true, searchKey,
+ searchKey, true, true, btreeCmp, btreeCmp);
+
+ IInvertedListCursor invListCursor = new FixedSizeElementInvertedListCursor(
+ bufferCache, invListsFileId, invListTypeTraits);
+
+ ISerializerDeserializer[] tokenSerde = { UTF8StringSerializerDeserializer.INSTANCE };
+ RecordDescriptor tokenRecDesc = new RecordDescriptor(tokenSerde);
+ FrameTupleAppender tokenAppender = new FrameTupleAppender(
+ stageletCtx.getFrameSize());
+ ArrayTupleBuilder tokenTupleBuilder = new ArrayTupleBuilder(1);
+ DataOutput tokenDos = tokenTupleBuilder.getDataOutput();
+ IFrameTupleAccessor tokenAccessor = new FrameTupleAccessor(
+ stageletCtx.getFrameSize(), tokenRecDesc);
+ tokenAccessor.reset(frame);
+
+ BTreeOpContext btreeOpCtx = invIndex.getBTree().createOpContext(
+ IndexOp.SEARCH, leafFrame, interiorFrame, null);
+
+ // verify created inverted lists one-by-one
+ for (int i = 0; i < tokens.size(); i++) {
+
+ tokenTupleBuilder.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(tokens.get(i),
+ tokenDos);
+ tokenTupleBuilder.addFieldEndOffset();
+
+ tokenAppender.reset(frame, true);
+ tokenAppender.append(tokenTupleBuilder.getFieldEndOffsets(),
+ tokenTupleBuilder.getByteArray(), 0,
+ tokenTupleBuilder.getSize());
+
+ searchKey.reset(tokenAccessor, 0);
+
+ invIndex.openCursor(btreeCursor, btreePred, btreeOpCtx,
+ invListCursor);
+
+ invListCursor.pinPagesSync();
+ int checkIndex = 0;
+ while (invListCursor.hasNext()) {
+ invListCursor.next();
+ ITupleReference invListTuple = invListCursor.getTuple();
+ int invListElement = IntegerSerializerDeserializer.getInt(
+ invListTuple.getFieldData(0),
+ invListTuple.getFieldStart(0));
+ int checkInvListElement = checkListElements.get(i)
+ .get(checkIndex).intValue();
+ Assert.assertEquals(invListElement, checkInvListElement);
+ checkIndex++;
+ }
+ invListCursor.unpinPages();
+ Assert.assertEquals(checkIndex, checkListElements.get(i).size());
+ }
+
+ // check that non-existing tokens have an empty inverted list
+ List<String> nonExistingTokens = new ArrayList<String>();
+ nonExistingTokens.add("watermelon");
+ nonExistingTokens.add("avocado");
+ nonExistingTokens.add("lemon");
+
+ for (int i = 0; i < nonExistingTokens.size(); i++) {
+
+ tokenTupleBuilder.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(
+ nonExistingTokens.get(i), tokenDos);
+ tokenTupleBuilder.addFieldEndOffset();
+
+ tokenAppender.reset(frame, true);
+ tokenAppender.append(tokenTupleBuilder.getFieldEndOffsets(),
+ tokenTupleBuilder.getByteArray(), 0,
+ tokenTupleBuilder.getSize());
+
+ searchKey.reset(tokenAccessor, 0);
+
+ invIndex.openCursor(btreeCursor, btreePred, btreeOpCtx,
+ invListCursor);
+
+ invListCursor.pinPagesSync();
+ Assert.assertEquals(invListCursor.hasNext(), false);
+ invListCursor.unpinPages();
+ }
+
+ btree.close();
+ bufferCache.closeFile(btreeFileId);
+ bufferCache.closeFile(invListsFileId);
+ bufferCache.close();
+ }
+
+ @AfterClass
+ public static void deinit() {
+ AbstractInvIndexTest.tearDown();
+ }
+}
diff --git a/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/FixedSizeFrameTupleTest.java b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/FixedSizeFrameTupleTest.java
new file mode 100644
index 0000000..0b49ec1
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/FixedSizeFrameTupleTest.java
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Random;
+
+import junit.framework.Assert;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.api.dataflow.value.TypeTrait;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.FixedSizeFrameTupleAccessor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.FixedSizeFrameTupleAppender;
+
+public class FixedSizeFrameTupleTest {
+
+ private static int FRAME_SIZE = 4096;
+
+ private Random rnd = new Random(50);
+
+ /**
+ * This test verifies the correct behavior of the FixedSizeFrameTupleAppender
+ * and FixedSizeFrameTupleAccessor classes. Frames containing fixed-size
+ * tuples require neither tuple slots nor field slots. The test appends
+ * generated data to a frame until the frame is full, and then verifies the
+ * frame's contents.
+ *
+ */
+ @Test
+ public void singleFieldTest() throws Exception {
+ ByteBuffer buffer = ByteBuffer.allocate(FRAME_SIZE);
+
+ ITypeTrait[] fields = new TypeTrait[1];
+ fields[0] = new TypeTrait(4);
+
+ FixedSizeFrameTupleAppender ftapp = new FixedSizeFrameTupleAppender(
+ FRAME_SIZE, fields);
+ FixedSizeFrameTupleAccessor ftacc = new FixedSizeFrameTupleAccessor(
+ FRAME_SIZE, fields);
+
+ boolean frameHasSpace = true;
+
+ ArrayList<Integer> check = new ArrayList<Integer>();
+
+ ftapp.reset(buffer, true);
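+ // Append random ints until the frame reports it is full, remembering
+ // every value that was actually appended.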
+ while (frameHasSpace) {
+ int val = rnd.nextInt();
+ frameHasSpace = ftapp.append(val);
+ if (frameHasSpace) {
+ check.add(val);
+ ftapp.incrementTupleCount(1);
+ }
+ }
+
+ ftacc.reset(buffer);
+ for (int i = 0; i < ftacc.getTupleCount(); i++) {
+ int val = IntegerSerializerDeserializer.getInt(ftacc.getBuffer()
+ .array(), ftacc.getTupleStartOffset(i));
+ Assert.assertEquals(check.get(i).intValue(), val);
+ }
+ }
+}
diff --git a/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/NGramTokenizerTest.java b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/NGramTokenizerTest.java
new file mode 100644
index 0000000..5f15a91
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/NGramTokenizerTest.java
@@ -0,0 +1,247 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ *
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.AbstractUTF8Token;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.HashedUTF8NGramTokenFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IToken;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.NGramUTF8StringBinaryTokenizer;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.UTF8NGramTokenFactory;
+
+public class NGramTokenizerTest {
+
+ private char PRECHAR = '#';
+ private char POSTCHAR = '$';
+
+ private String str = "Jürgen S. Generic's Car";
+ private byte[] inputBuffer;
+
+ private int gramLength = 3;
+
+ private void getExpectedGrams(String s, int gramLength,
+ ArrayList<String> grams, boolean prePost) {
+
+ String tmp = s.toLowerCase();
+ if (prePost) {
+ StringBuilder preBuilder = new StringBuilder();
+ for (int i = 0; i < gramLength - 1; i++) {
+ preBuilder.append(PRECHAR);
+ }
+ String pre = preBuilder.toString();
+
+ StringBuilder postBuilder = new StringBuilder();
+ for (int i = 0; i < gramLength - 1; i++) {
+ postBuilder.append(POSTCHAR);
+ }
+ String post = postBuilder.toString();
+
+ tmp = pre + s.toLowerCase() + post;
+ }
+
+ for (int i = 0; i < tmp.length() - gramLength + 1; i++) {
+ String gram = tmp.substring(i, i + gramLength);
+ grams.add(gram);
+ }
+ }
+
+ @Before
+ public void init() throws Exception {
+ // serialize string into bytes
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ DataOutput dos = new DataOutputStream(baos);
+ dos.writeUTF(str);
+ inputBuffer = baos.toByteArray();
+ }
+
+ void runTestNGramTokenizerWithCountedHashedUTF8Tokens(boolean prePost)
+ throws IOException {
+ HashedUTF8NGramTokenFactory tokenFactory = new HashedUTF8NGramTokenFactory();
+ NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(
+ gramLength, prePost, false, false, tokenFactory);
+ tokenizer.reset(inputBuffer, 0, inputBuffer.length);
+
+ ArrayList<String> expectedGrams = new ArrayList<String>();
+ getExpectedGrams(str, gramLength, expectedGrams, prePost);
+ ArrayList<Integer> expectedHashedGrams = new ArrayList<Integer>();
+ HashMap<String, Integer> gramCounts = new HashMap<String, Integer>();
+ for (String s : expectedGrams) {
+ Integer count = gramCounts.get(s);
+ if (count == null) {
+ count = 1;
+ gramCounts.put(s, count);
+ } else {
+ count++;
+ }
+
+ int hash = tokenHash(s, count);
+ expectedHashedGrams.add(hash);
+ }
+
+ int tokenCount = 0;
+
+ while (tokenizer.hasNext()) {
+ tokenizer.next();
+
+ // serialize hashed token
+ ByteArrayOutputStream tokenBaos = new ByteArrayOutputStream();
+ DataOutput tokenDos = new DataOutputStream(tokenBaos);
+
+ IToken token = tokenizer.getToken();
+ token.serializeToken(tokenDos);
+
+ // deserialize token
+ ByteArrayInputStream bais = new ByteArrayInputStream(
+ tokenBaos.toByteArray());
+ DataInput in = new DataInputStream(bais);
+
+ Integer hashedGram = in.readInt();
+
+ // System.out.println(hashedGram);
+
+ Assert.assertEquals(expectedHashedGrams.get(tokenCount), hashedGram);
+
+ tokenCount++;
+ }
+ // System.out.println("---------");
+ }
+
+ void runTestNGramTokenizerWithHashedUTF8Tokens(boolean prePost)
+ throws IOException {
+ HashedUTF8NGramTokenFactory tokenFactory = new HashedUTF8NGramTokenFactory();
+ NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(
+ gramLength, prePost, true, false, tokenFactory);
+ tokenizer.reset(inputBuffer, 0, inputBuffer.length);
+
+ ArrayList<String> expectedGrams = new ArrayList<String>();
+ getExpectedGrams(str, gramLength, expectedGrams, prePost);
+ ArrayList<Integer> expectedHashedGrams = new ArrayList<Integer>();
+ for (String s : expectedGrams) {
+ int hash = tokenHash(s, 1);
+ expectedHashedGrams.add(hash);
+ }
+
+ int tokenCount = 0;
+
+ while (tokenizer.hasNext()) {
+ tokenizer.next();
+
+ // serialize hashed token
+ ByteArrayOutputStream tokenBaos = new ByteArrayOutputStream();
+ DataOutput tokenDos = new DataOutputStream(tokenBaos);
+
+ IToken token = tokenizer.getToken();
+ token.serializeToken(tokenDos);
+
+ // deserialize token
+ ByteArrayInputStream bais = new ByteArrayInputStream(
+ tokenBaos.toByteArray());
+ DataInput in = new DataInputStream(bais);
+
+ Integer hashedGram = in.readInt();
+
+ // System.out.println(hashedGram);
+
+ Assert.assertEquals(expectedHashedGrams.get(tokenCount), hashedGram);
+
+ tokenCount++;
+ }
+ // System.out.println("---------");
+ }
+
+ void runTestNGramTokenizerWithUTF8Tokens(boolean prePost)
+ throws IOException {
+ UTF8NGramTokenFactory tokenFactory = new UTF8NGramTokenFactory();
+ NGramUTF8StringBinaryTokenizer tokenizer = new NGramUTF8StringBinaryTokenizer(
+ gramLength, prePost, true, false, tokenFactory);
+ tokenizer.reset(inputBuffer, 0, inputBuffer.length);
+
+ ArrayList<String> expectedGrams = new ArrayList<String>();
+ getExpectedGrams(str, gramLength, expectedGrams, prePost);
+
+ int tokenCount = 0;
+
+ while (tokenizer.hasNext()) {
+ tokenizer.next();
+
+ // serialize token
+ ByteArrayOutputStream tokenBaos = new ByteArrayOutputStream();
+ DataOutput tokenDos = new DataOutputStream(tokenBaos);
+
+ IToken token = tokenizer.getToken();
+ token.serializeToken(tokenDos);
+
+ // deserialize token
+ ByteArrayInputStream bais = new ByteArrayInputStream(
+ tokenBaos.toByteArray());
+ DataInput in = new DataInputStream(bais);
+
+ String strGram = in.readUTF();
+
+ // System.out.println("\"" + strGram + "\"");
+
+ Assert.assertEquals(expectedGrams.get(tokenCount), strGram);
+
+ tokenCount++;
+ }
+ // System.out.println("---------");
+ }
+
+ @Test
+ public void testNGramTokenizerWithCountedHashedUTF8Tokens()
+ throws Exception {
+ runTestNGramTokenizerWithCountedHashedUTF8Tokens(false);
+ runTestNGramTokenizerWithCountedHashedUTF8Tokens(true);
+ }
+
+ @Test
+ public void testNGramTokenizerWithHashedUTF8Tokens() throws Exception {
+ runTestNGramTokenizerWithHashedUTF8Tokens(false);
+ runTestNGramTokenizerWithHashedUTF8Tokens(true);
+ }
+
+ @Test
+ public void testNGramTokenizerWithUTF8Tokens() throws IOException {
+ runTestNGramTokenizerWithUTF8Tokens(false);
+ runTestNGramTokenizerWithUTF8Tokens(true);
+ }
+
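+ // Recomputes the hash produced by the hashed n-gram token factories:
+ // a golden-ratio multiplicative hash over the characters, plus the
+ // token's occurrence count.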
+ public int tokenHash(String token, int tokenCount) {
+ int h = AbstractUTF8Token.GOLDEN_RATIO_32;
+ for (int i = 0; i < token.length(); i++) {
+ h ^= token.charAt(i);
+ h *= AbstractUTF8Token.GOLDEN_RATIO_32;
+ }
+ return h + tokenCount;
+ }
+}
diff --git a/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchPerfTest.java b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchPerfTest.java
new file mode 100644
index 0000000..161f20f
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchPerfTest.java
@@ -0,0 +1,292 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifier;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListBuilder;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.FixedSizeElementInvertedListBuilder;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndex;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.OccurrenceThresholdPanicException;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.SearchResultCursor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.TOccurrenceSearcher;
+import edu.uci.ics.hyracks.storage.am.invertedindex.searchmodifiers.ConjunctiveSearchModifier;
+import edu.uci.ics.hyracks.storage.am.invertedindex.searchmodifiers.JaccardSearchModifier;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.DelimitedUTF8StringBinaryTokenizer;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.UTF8WordTokenFactory;
+
+/**
+ * The purpose of this test is to evaluate the performance of searches against
+ * an inverted index. First, we generate random <token, id> pairs sorted on
+ * token, which are bulk loaded into an inverted index. Next, we build random
+ * queries from a list of predefined tokens in the index, and measure the
+ * performance of executing them with different search modifiers. We test the
+ * ConjunctiveSearchModifier and the JaccardSearchModifier.
+ *
+ */
+public class SearchPerfTest extends AbstractInvIndexSearchTest {
+
+ protected List<String> tokens = new ArrayList<String>();
+
+ @Before
+ public void start() throws Exception {
+ super.start();
+ tokenFactory = new UTF8WordTokenFactory();
+ tokenizer = new DelimitedUTF8StringBinaryTokenizer(true, false,
+ tokenFactory);
+ searcher = new TOccurrenceSearcher(stageletCtx, invIndex, tokenizer);
+ resultCursor = new SearchResultCursor(
+ searcher.createResultFrameTupleAccessor(),
+ searcher.createResultTupleReference());
+ loadData();
+ }
+
+ public void loadData() throws HyracksDataException {
+ tokens.add("compilers");
+ tokens.add("computer");
+ tokens.add("databases");
+ tokens.add("fast");
+ tokens.add("hyracks");
+ tokens.add("major");
+ tokens.add("science");
+ tokens.add("systems");
+ tokens.add("university");
+
+ for (int i = 0; i < tokens.size(); i++) {
+ checkInvLists.add(new ArrayList<Integer>());
+ }
+
+ // for generating length-skewed inverted lists
+ int addProb = 0;
+ int addProbStep = 10;
+
+ IInvertedListBuilder invListBuilder = new FixedSizeElementInvertedListBuilder(
+ invListTypeTraits);
+ InvertedIndex.BulkLoadContext ctx = invIndex.beginBulkLoad(
+ invListBuilder, HYRACKS_FRAME_SIZE, BTree.DEFAULT_FILL_FACTOR);
+
+ int totalElements = 0;
+ for (int i = 0; i < tokens.size(); i++) {
+
+ addProb += addProbStep * (i + 1);
+ for (int j = 0; j < maxId; j++) {
+ if ((Math.abs(rnd.nextInt()) % addProb) == 0) {
+
+ totalElements++;
+
+ tb.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(
+ tokens.get(i), dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(j, dos);
+ tb.addFieldEndOffset();
+
+ checkInvLists.get(i).add(j);
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(),
+ 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ try {
+ invIndex.bulkLoadAddTuple(ctx, tuple);
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ }
+ }
+ invIndex.endBulkLoad(ctx);
+ }
+
+ /**
+ * Determine the expected results with the ScanCount algorithm. The
+ * ScanCount algorithm is very simple, so we can be confident the results
+ * are correct.
+ *
+ */
+ protected void fillExpectedResults(int[] queryTokenIndexes,
+ int numQueryTokens, int occurrenceThreshold) {
+ // reset scan count array
+ for (int i = 0; i < maxId; i++) {
+ scanCountArray[i] = 0;
+ }
+
+ // count occurrences
+ for (int i = 0; i < numQueryTokens; i++) {
+ ArrayList<Integer> list = checkInvLists.get(queryTokenIndexes[i]);
+ for (int j = 0; j < list.size(); j++) {
+ scanCountArray[list.get(j)]++;
+ }
+ }
+
+ // check threshold
+ expectedResults.clear();
+ for (int i = 0; i < maxId; i++) {
+ if (scanCountArray[i] >= occurrenceThreshold) {
+ expectedResults.add(i);
+ }
+ }
+ }
+
+ /**
+ * Generates and runs a specified number of queries. Each query consists of
+ * a set of tokens randomly picked from the predefined token list. We
+ * measure each query's time and verify its results against the results
+ * produced by ScanCount, implemented in fillExpectedResults().
+ *
+ */
+ private void runQueries(IInvertedIndexSearchModifier searchModifier,
+ int numQueries) throws Exception {
+
+ rnd.setSeed(50);
+
+ // generate random queries
+ int[] queryTokenIndexes = new int[tokens.size()];
+ for (int i = 0; i < numQueries; i++) {
+
+ int numQueryTokens = Math.abs(rnd.nextInt() % tokens.size()) + 1;
+ for (int j = 0; j < numQueryTokens; j++) {
+ queryTokenIndexes[j] = Math.abs(rnd.nextInt() % tokens.size());
+ }
+
+ StringBuilder strBuilder = new StringBuilder();
+ for (int j = 0; j < numQueryTokens; j++) {
+ strBuilder.append(tokens.get(queryTokenIndexes[j]));
+ if (j + 1 != numQueryTokens)
+ strBuilder.append(" ");
+ }
+
+ String queryString = strBuilder.toString();
+
+ queryTb.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(queryString,
+ queryDos);
+ queryTb.addFieldEndOffset();
+
+ queryAppender.reset(frame, true);
+ queryAppender.append(queryTb.getFieldEndOffsets(),
+ queryTb.getByteArray(), 0, queryTb.getSize());
+ queryTuple.reset(queryAccessor, 0);
+
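+ // The searcher may give up with an OccurrenceThresholdPanicException;
+ // results of such queries are not verified.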
+ boolean panic = false;
+
+ int repeats = 1;
+ double totalTime = 0;
+ for (int j = 0; j < repeats; j++) {
+ long timeStart = System.currentTimeMillis();
+ try {
+ searcher.reset();
+ searcher.search(resultCursor, queryTuple, 0, searchModifier);
+ } catch (OccurrenceThresholdPanicException e) {
+ panic = true;
+ }
+ long timeEnd = System.currentTimeMillis();
+ totalTime += timeEnd - timeStart;
+ }
+ double avgTime = totalTime / (double) repeats;
+ LOGGER.info(i + ": " + "\"" + queryString + "\": " + avgTime + "ms");
+
+ if (!panic) {
+
+ fillExpectedResults(queryTokenIndexes, numQueryTokens,
+ searcher.getOccurrenceThreshold());
+
+ // verify results
+ int checkIndex = 0;
+ while (resultCursor.hasNext()) {
+ resultCursor.next();
+ ITupleReference resultTuple = resultCursor.getTuple();
+ int id = IntegerSerializerDeserializer.getInt(
+ resultTuple.getFieldData(0),
+ resultTuple.getFieldStart(0));
+ Assert.assertEquals(expectedResults.get(checkIndex)
+ .intValue(), id);
+ checkIndex++;
+ }
+
+ if (expectedResults.size() != checkIndex) {
+ LOGGER.info("CHECKING");
+ StringBuilder expectedStrBuilder = new StringBuilder();
+ for (Integer x : expectedResults) {
+ expectedStrBuilder.append(x + " ");
+ }
+ LOGGER.info(expectedStrBuilder.toString());
+ }
+
+ Assert.assertEquals(expectedResults.size(), checkIndex);
+ }
+ }
+ }
+
+ /**
+ * Runs 50 random conjunctive search queries to test the
+ * ConjunctiveSearchModifier.
+ *
+ */
+ @Test
+ public void conjunctiveKeywordQueryTest() throws Exception {
+ IInvertedIndexSearchModifier searchModifier = new ConjunctiveSearchModifier();
+ runQueries(searchModifier, 50);
+ }
+
+ /**
+ * Runs 50 random jaccard-based search queries with thresholds 1.0, 0.9,
+ * 0.8, 0.7, 0.6, 0.5. Tests the JaccardSearchModifier.
+ *
+ */
+ @Test
+ public void jaccardKeywordQueryTest() throws Exception {
+ JaccardSearchModifier searchModifier = new JaccardSearchModifier(1.0f);
+
+ LOGGER.info("JACCARD: " + 1.0f);
+ searchModifier.setJaccThresh(1.0f);
+ runQueries(searchModifier, 50);
+
+ LOGGER.info("JACCARD: " + 0.9f);
+ searchModifier.setJaccThresh(0.9f);
+ runQueries(searchModifier, 50);
+
+ LOGGER.info("JACCARD: " + 0.8f);
+ searchModifier.setJaccThresh(0.8f);
+ runQueries(searchModifier, 50);
+
+ LOGGER.info("JACCARD: " + 0.7f);
+ searchModifier.setJaccThresh(0.7f);
+ runQueries(searchModifier, 50);
+
+ LOGGER.info("JACCARD: " + 0.6f);
+ searchModifier.setJaccThresh(0.6f);
+ runQueries(searchModifier, 50);
+
+ LOGGER.info("JACCARD: " + 0.5f);
+ searchModifier.setJaccThresh(0.5f);
+ runQueries(searchModifier, 50);
+ }
+}
diff --git a/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchTest.java b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchTest.java
new file mode 100644
index 0000000..d9fef2c
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchTest.java
@@ -0,0 +1,276 @@
+package edu.uci.ics.hyracks.storage.am.invertedindex;
+
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ByteArrayAccessibleOutputStream;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifier;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListBuilder;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.FixedSizeElementInvertedListBuilder;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndex;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.OccurrenceThresholdPanicException;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.SearchResultCursor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.TOccurrenceSearcher;
+import edu.uci.ics.hyracks.storage.am.invertedindex.searchmodifiers.ConjunctiveSearchModifier;
+import edu.uci.ics.hyracks.storage.am.invertedindex.searchmodifiers.EditDistanceSearchModifier;
+import edu.uci.ics.hyracks.storage.am.invertedindex.searchmodifiers.JaccardSearchModifier;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IToken;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.NGramUTF8StringBinaryTokenizer;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.UTF8NGramTokenFactory;
+
+public class SearchTest extends AbstractInvIndexSearchTest {
+
+ protected List<String> dataStrings = new ArrayList<String>();
+ protected List<String> firstNames = new ArrayList<String>();
+ protected List<String> lastNames = new ArrayList<String>();
+
+ @Before
+ public void start() throws Exception {
+ super.start();
+ tokenFactory = new UTF8NGramTokenFactory();
+ tokenizer = new NGramUTF8StringBinaryTokenizer(3, false, true, false,
+ tokenFactory);
+ searcher = new TOccurrenceSearcher(stageletCtx, invIndex, tokenizer);
+ resultCursor = new SearchResultCursor(
+ searcher.createResultFrameTupleAccessor(),
+ searcher.createResultTupleReference());
+ generateDataStrings();
+ loadData();
+ }
+
+ public void generateDataStrings() {
+ firstNames.add("Kathrin");
+ firstNames.add("Cathrin");
+ firstNames.add("Kathryn");
+ firstNames.add("Cathryn");
+ firstNames.add("Kathrine");
+ firstNames.add("Cathrine");
+ firstNames.add("Kathryne");
+ firstNames.add("Cathryne");
+ firstNames.add("Katherin");
+ firstNames.add("Catherin");
+ firstNames.add("Katheryn");
+ firstNames.add("Catheryn");
+ firstNames.add("Katherine");
+ firstNames.add("Catherine");
+ firstNames.add("Katheryne");
+ firstNames.add("Catheryne");
+ firstNames.add("John");
+ firstNames.add("Jack");
+ firstNames.add("Jonathan");
+ firstNames.add("Nathan");
+
+ lastNames.add("Miller");
+ lastNames.add("Myller");
+ lastNames.add("Keller");
+ lastNames.add("Ketler");
+ lastNames.add("Muller");
+ lastNames.add("Fuller");
+ lastNames.add("Smith");
+ lastNames.add("Smyth");
+ lastNames.add("Smithe");
+ lastNames.add("Smythe");
+
+ // Generate all 'firstName lastName' combinations as data strings
+ for (String f : firstNames) {
+ for (String l : lastNames) {
+ dataStrings.add(f + " " + l);
+ }
+ }
+ }
+
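+ // Pairs a serialized token with the id of its source string. Pairs sort by
+ // token bytes (using the btree key comparator) and then by id, which is the
+ // order required for bulk loading.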
+ private class TokenIdPair implements Comparable<TokenIdPair> {
+ public ByteArrayAccessibleOutputStream baaos = new ByteArrayAccessibleOutputStream();
+ public DataOutputStream dos = new DataOutputStream(baaos);
+ public int id;
+
+ TokenIdPair(IToken token, int id) throws IOException {
+ token.serializeToken(dos);
+ this.id = id;
+ }
+
+ @Override
+ public int compareTo(TokenIdPair o) {
+ int cmp = btreeBinCmps[0].compare(baaos.getByteArray(), 0,
+ baaos.getByteArray().length, o.baaos.getByteArray(), 0,
+ o.baaos.getByteArray().length);
+ if (cmp == 0) {
+ return id - o.id;
+ } else {
+ return cmp;
+ }
+ }
+ }
+
+ public void loadData() throws IOException {
+ List<TokenIdPair> pairs = new ArrayList<TokenIdPair>();
+ // generate pairs for subsequent sorting and bulk-loading
+ int id = 0;
+ for (String s : dataStrings) {
+ ByteArrayAccessibleOutputStream baaos = new ByteArrayAccessibleOutputStream();
+ DataOutputStream dos = new DataOutputStream(baaos);
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(s, dos);
+ tokenizer.reset(baaos.getByteArray(), 0, baaos.size());
+ int tokenCount = 0;
+ while (tokenizer.hasNext()) {
+ tokenizer.next();
+ IToken token = tokenizer.getToken();
+ pairs.add(new TokenIdPair(token, id));
+ ++tokenCount;
+ }
+ ++id;
+ }
+ Collections.sort(pairs);
+
+ // bulk load index
+ IInvertedListBuilder invListBuilder = new FixedSizeElementInvertedListBuilder(
+ invListTypeTraits);
+ InvertedIndex.BulkLoadContext ctx = invIndex.beginBulkLoad(
+ invListBuilder, HYRACKS_FRAME_SIZE, BTree.DEFAULT_FILL_FACTOR);
+
+ for (TokenIdPair t : pairs) {
+ tb.reset();
+ tb.addField(t.baaos.getByteArray(), 0,
+ t.baaos.getByteArray().length);
+ IntegerSerializerDeserializer.INSTANCE.serialize(t.id, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0,
+ tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ try {
+ invIndex.bulkLoadAddTuple(ctx, tuple);
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ invIndex.endBulkLoad(ctx);
+ }
+
+ /**
+ * Runs a specified number of randomly picked strings from dataStrings as
+ * queries. We run each query, measure its time, and print its results.
+ *
+ */
+ private void runQueries(IInvertedIndexSearchModifier searchModifier,
+ int numQueries) throws Exception {
+
+ rnd.setSeed(50);
+
+ for (int i = 0; i < numQueries; i++) {
+
+ int queryIndex = Math.abs(rnd.nextInt() % dataStrings.size());
+ String queryString = dataStrings.get(queryIndex);
+
+ queryTb.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(queryString,
+ queryDos);
+ queryTb.addFieldEndOffset();
+
+ queryAppender.reset(frame, true);
+ queryAppender.append(queryTb.getFieldEndOffsets(),
+ queryTb.getByteArray(), 0, queryTb.getSize());
+ queryTuple.reset(queryAccessor, 0);
+
+ int repeats = 1;
+ double totalTime = 0;
+ for (int j = 0; j < repeats; j++) {
+ long timeStart = System.currentTimeMillis();
+ try {
+ searcher.reset();
+ searcher.search(resultCursor, queryTuple, 0, searchModifier);
+ } catch (OccurrenceThresholdPanicException e) {
+ // ignore panic queries
+ }
+ long timeEnd = System.currentTimeMillis();
+ totalTime += timeEnd - timeStart;
+ }
+ double avgTime = totalTime / (double) repeats;
+ StringBuilder strBuilder = new StringBuilder();
+ strBuilder.append(i + ": " + "\"" + queryString + "\": " + avgTime
+ + "ms" + "\n");
+ strBuilder.append("CANDIDATE RESULTS:\n");
+ while (resultCursor.hasNext()) {
+ resultCursor.next();
+ ITupleReference resultTuple = resultCursor.getTuple();
+ int id = IntegerSerializerDeserializer.getInt(
+ resultTuple.getFieldData(0),
+ resultTuple.getFieldStart(0));
+ strBuilder.append(id + " " + dataStrings.get(id));
+ strBuilder.append('\n');
+ }
+ // remove trailing newline
+ strBuilder.deleteCharAt(strBuilder.length() - 1);
+ LOGGER.info(strBuilder.toString());
+ }
+ }
+
+ /**
+ * Runs 5 random conjunctive search queries to test the
+ * ConjunctiveSearchModifier.
+ *
+ */
+ @Test
+ public void conjunctiveQueryTest() throws Exception {
+ IInvertedIndexSearchModifier searchModifier = new ConjunctiveSearchModifier();
+ runQueries(searchModifier, 5);
+ }
+
+ /**
+ * Runs 5 random jaccard-based search queries with thresholds 0.9, 0.8, 0.7.
+ * Tests the JaccardSearchModifier.
+ *
+ */
+ @Test
+ public void jaccardQueryTest() throws Exception {
+ JaccardSearchModifier searchModifier = new JaccardSearchModifier(1.0f);
+
+ LOGGER.info("JACCARD: " + 0.9f);
+ searchModifier.setJaccThresh(0.9f);
+ runQueries(searchModifier, 5);
+
+ LOGGER.info("JACCARD: " + 0.8f);
+ searchModifier.setJaccThresh(0.8f);
+ runQueries(searchModifier, 5);
+
+ LOGGER.info("JACCARD: " + 0.7f);
+ searchModifier.setJaccThresh(0.7f);
+ runQueries(searchModifier, 5);
+ }
+
+ /**
+ * Runs 5 random edit-distance based search queries with thresholds 1, 2, 3.
+ * Tests the EditDistanceSearchModifier.
+ *
+ */
+ @Test
+ public void editDistanceQueryTest() throws Exception {
+ EditDistanceSearchModifier searchModifier = new EditDistanceSearchModifier(
+ 3, 0);
+
+ LOGGER.info("EDIT DISTANCE: " + 1);
+ searchModifier.setEdThresh(1);
+ runQueries(searchModifier, 5);
+
+ LOGGER.info("EDIT DISTANCE: " + 2);
+ searchModifier.setEdThresh(2);
+ runQueries(searchModifier, 5);
+
+ LOGGER.info("EDIT DISTANCE: " + 3);
+ searchModifier.setEdThresh(3);
+ runQueries(searchModifier, 5);
+ }
+}
diff --git a/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/WordTokenizerTest.java b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/WordTokenizerTest.java
new file mode 100644
index 0000000..57fe306
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/WordTokenizerTest.java
@@ -0,0 +1,222 @@
+/**
+ * Copyright 2010-2011 The Regents of the University of California
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on
+ * an "AS IS"; BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations under
+ * the License.
+ *
+ * Author: Alexander Behm <abehm (at) ics.uci.edu>
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+
+import junit.framework.Assert;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.AbstractUTF8Token;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.DelimitedUTF8StringBinaryTokenizer;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.HashedUTF8WordTokenFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IToken;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.UTF8WordTokenFactory;
+
+public class WordTokenizerTest {
+
+ private String text = "Hello World, I would like to inform you of the importance of Foo Bar. Yes, Foo Bar. Jürgen.";
+ private byte[] inputBuffer;
+
+ private ArrayList<String> expectedUTF8Tokens = new ArrayList<String>();
+ private ArrayList<Integer> expectedHashedUTF8Tokens = new ArrayList<Integer>();
+ private ArrayList<Integer> expectedCountedHashedUTF8Tokens = new ArrayList<Integer>();
+
+ @Before
+ public void init() throws IOException {
+ // serialize text into bytes
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ DataOutput dos = new DataOutputStream(baos);
+ dos.writeUTF(text);
+ inputBuffer = baos.toByteArray();
+
+ // init expected string tokens
+ expectedUTF8Tokens.add("hello");
+ expectedUTF8Tokens.add("world");
+ expectedUTF8Tokens.add("i");
+ expectedUTF8Tokens.add("would");
+ expectedUTF8Tokens.add("like");
+ expectedUTF8Tokens.add("to");
+ expectedUTF8Tokens.add("inform");
+ expectedUTF8Tokens.add("you");
+ expectedUTF8Tokens.add("of");
+ expectedUTF8Tokens.add("the");
+ expectedUTF8Tokens.add("importance");
+ expectedUTF8Tokens.add("of");
+ expectedUTF8Tokens.add("foo");
+ expectedUTF8Tokens.add("bar");
+ expectedUTF8Tokens.add("yes");
+ expectedUTF8Tokens.add("foo");
+ expectedUTF8Tokens.add("bar");
+ expectedUTF8Tokens.add("jürgen");
+
+ // hashed tokens ignoring token count
+ for (int i = 0; i < expectedUTF8Tokens.size(); i++) {
+ int hash = tokenHash(expectedUTF8Tokens.get(i), 1);
+ expectedHashedUTF8Tokens.add(hash);
+ }
+
+ // hashed tokens using token count
+ HashMap<String, Integer> tokenCounts = new HashMap<String, Integer>();
+ for (int i = 0; i < expectedUTF8Tokens.size(); i++) {
+ Integer count = tokenCounts.get(expectedUTF8Tokens.get(i));
+ if (count == null) {
+ count = 1;
+ tokenCounts.put(expectedUTF8Tokens.get(i), count);
+ } else {
+ count++;
+ }
+
+ int hash = tokenHash(expectedUTF8Tokens.get(i), count);
+ expectedCountedHashedUTF8Tokens.add(hash);
+ }
+ }
+
+ @Test
+ public void testWordTokenizerWithCountedHashedUTF8Tokens()
+ throws IOException {
+
+ HashedUTF8WordTokenFactory tokenFactory = new HashedUTF8WordTokenFactory();
+ DelimitedUTF8StringBinaryTokenizer tokenizer = new DelimitedUTF8StringBinaryTokenizer(
+ false, false, tokenFactory);
+
+ tokenizer.reset(inputBuffer, 0, inputBuffer.length);
+
+ int tokenCount = 0;
+
+ while (tokenizer.hasNext()) {
+ tokenizer.next();
+
+ // serialize token
+ ByteArrayOutputStream tokenBaos = new ByteArrayOutputStream();
+ DataOutput tokenDos = new DataOutputStream(tokenBaos);
+
+ IToken token = tokenizer.getToken();
+ token.serializeToken(tokenDos);
+
+ // deserialize token
+ ByteArrayInputStream bais = new ByteArrayInputStream(
+ tokenBaos.toByteArray());
+ DataInput in = new DataInputStream(bais);
+
+ Integer hashedToken = in.readInt();
+
+ // System.out.println(hashedToken);
+
+ Assert.assertEquals(hashedToken,
+ expectedCountedHashedUTF8Tokens.get(tokenCount));
+
+ tokenCount++;
+ }
+ }
+
+ @Test
+ public void testWordTokenizerWithHashedUTF8Tokens() throws IOException {
+
+ HashedUTF8WordTokenFactory tokenFactory = new HashedUTF8WordTokenFactory();
+ DelimitedUTF8StringBinaryTokenizer tokenizer = new DelimitedUTF8StringBinaryTokenizer(
+ true, false, tokenFactory);
+
+ tokenizer.reset(inputBuffer, 0, inputBuffer.length);
+
+ int tokenCount = 0;
+
+ while (tokenizer.hasNext()) {
+ tokenizer.next();
+
+ // serialize token
+ ByteArrayOutputStream tokenBaos = new ByteArrayOutputStream();
+ DataOutput tokenDos = new DataOutputStream(tokenBaos);
+
+ IToken token = tokenizer.getToken();
+ token.serializeToken(tokenDos);
+
+ // deserialize token
+ ByteArrayInputStream bais = new ByteArrayInputStream(
+ tokenBaos.toByteArray());
+ DataInput in = new DataInputStream(bais);
+
+ Integer hashedToken = in.readInt();
+
+ // System.out.println(hashedToken);
+
+ Assert.assertEquals(expectedHashedUTF8Tokens.get(tokenCount),
+ hashedToken);
+
+ tokenCount++;
+ }
+ }
+
+ @Test
+ public void testWordTokenizerWithUTF8Tokens() throws IOException {
+
+ UTF8WordTokenFactory tokenFactory = new UTF8WordTokenFactory();
+ DelimitedUTF8StringBinaryTokenizer tokenizer = new DelimitedUTF8StringBinaryTokenizer(
+ true, false, tokenFactory);
+
+ tokenizer.reset(inputBuffer, 0, inputBuffer.length);
+
+ int tokenCount = 0;
+
+ while (tokenizer.hasNext()) {
+ tokenizer.next();
+
+ // serialize hashed token
+ ByteArrayOutputStream tokenBaos = new ByteArrayOutputStream();
+ DataOutput tokenDos = new DataOutputStream(tokenBaos);
+
+ IToken token = tokenizer.getToken();
+ token.serializeToken(tokenDos);
+
+ // deserialize token
+ ByteArrayInputStream bais = new ByteArrayInputStream(
+ tokenBaos.toByteArray());
+ DataInput in = new DataInputStream(bais);
+
+ String strToken = in.readUTF();
+
+ // System.out.println(strToken);
+
+ Assert.assertEquals(expectedUTF8Tokens.get(tokenCount), strToken);
+
+ tokenCount++;
+ }
+ }
+
+ // JAQL-style multiplicative hash: fold in each character with the 32-bit golden ratio, then add the token count
+ public int tokenHash(String token, int tokenCount) {
+ int h = AbstractUTF8Token.GOLDEN_RATIO_32;
+ for (int i = 0; i < token.length(); i++) {
+ h ^= token.charAt(i);
+ h *= AbstractUTF8Token.GOLDEN_RATIO_32;
+ }
+ return h + tokenCount;
+ }
+}
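
Note on the counted-hash variant exercised above: hashing the same word with a different occurrence count yields a different hashed token, so repeated words in one document stay distinguishable in the inverted index. A minimal, standalone sketch of that effect (the golden-ratio constant is inlined here as an assumption; the test reads it from AbstractUTF8Token.GOLDEN_RATIO_32):

    public class CountedTokenHashSketch {
        // Assumed value of the 32-bit golden-ratio constant; the test uses AbstractUTF8Token.GOLDEN_RATIO_32.
        private static final int GOLDEN_RATIO_32 = 0x9e3779b9;

        // Same shape as tokenHash() above: fold in each character, then add the occurrence count.
        static int tokenHash(String token, int tokenCount) {
            int h = GOLDEN_RATIO_32;
            for (int i = 0; i < token.length(); i++) {
                h ^= token.charAt(i);
                h *= GOLDEN_RATIO_32;
            }
            return h + tokenCount;
        }

        public static void main(String[] args) {
            // "foo" appears twice in the test document; the two occurrences hash differently.
            System.out.println(tokenHash("foo", 1));
            System.out.println(tokenHash("foo", 2)); // differs from the first by construction
        }
    }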
diff --git a/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchers/AbstractInvIndexTest.java b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchers/AbstractInvIndexTest.java
deleted file mode 100644
index 5d2cfff..0000000
--- a/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchers/AbstractInvIndexTest.java
+++ /dev/null
@@ -1,25 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.invertedindex.searchers;
-
-import java.io.File;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-
-import org.junit.AfterClass;
-
-public abstract class AbstractInvIndexTest {
-
- protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
- protected final static String tmpDir = System.getProperty("java.io.tmpdir");
- protected final static String sep = System.getProperty("file.separator");
- protected final static String fileName = tmpDir + sep + simpleDateFormat.format(new Date());
-
- protected void print(String str) {
- System.out.print(str);
- }
-
- @AfterClass
- public static void cleanup() throws Exception {
- File f = new File(fileName);
- f.deleteOnExit();
- }
-}
diff --git a/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchers/SimpleConjunctiveSearcherTest.java b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchers/SimpleConjunctiveSearcherTest.java
deleted file mode 100644
index f61249b..0000000
--- a/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchers/SimpleConjunctiveSearcherTest.java
+++ /dev/null
@@ -1,279 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.invertedindex.searchers;
-
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
-import java.io.DataOutput;
-import java.io.File;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Random;
-
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.dataflow.value.TypeTrait;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.comparators.UTF8StringBinaryComparatorFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeMetaDataFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.frames.MetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.frames.NSMInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.frames.NSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOp;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext;
-import edu.uci.ics.hyracks.storage.am.btree.impls.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.btree.tuples.TypeAwareTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.am.invertedindex.api.IBinaryTokenizer;
-import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexResultCursor;
-import edu.uci.ics.hyracks.storage.am.invertedindex.impls.SimpleConjunctiveSearcher;
-import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.DelimitedUTF8StringBinaryTokenizer;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestUtils;
-
-public class SimpleConjunctiveSearcherTest extends AbstractInvIndexTest {
-
- // testing params
- // private static final int PAGE_SIZE = 256;
- // private static final int NUM_PAGES = 10;
- // private static final int HYRACKS_FRAME_SIZE = 256;
-
- // realistic params
- // private static final int PAGE_SIZE = 65536;
- private static final int PAGE_SIZE = 32768;
- private static final int NUM_PAGES = 10;
- private static final int HYRACKS_FRAME_SIZE = 32768;
- private IHyracksStageletContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
-
- public class BufferAllocator implements ICacheMemoryAllocator {
- @Override
- public ByteBuffer[] allocate(int pageSize, int numPages) {
- ByteBuffer[] buffers = new ByteBuffer[numPages];
- for (int i = 0; i < numPages; ++i) {
- buffers[i] = ByteBuffer.allocate(pageSize);
- }
- return buffers;
- }
- }
-
- @Test
- public void test01() throws Exception {
-
- TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES);
- IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
- IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
- FileReference file = new FileReference(new File(fileName));
- bufferCache.createFile(file);
- int fileId = fmp.lookupFileId(file);
- bufferCache.openFile(fileId);
-
- // declare fields
- int fieldCount = 2;
- ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
- typeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
- typeTraits[1] = new TypeTrait(4);
-
- // declare keys
- int keyFieldCount = 2;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
- cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
-
- MultiComparator cmp = new MultiComparator(typeTraits, cmps);
-
- TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
- // SimpleTupleWriterFactory tupleWriterFactory = new
- // SimpleTupleWriterFactory();
- IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
- // IBTreeLeafFrameFactory leafFrameFactory = new
- // FieldPrefixNSMLeafFrameFactory(tupleWriterFactory);
- IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
- IBTreeMetaDataFrameFactory metaFrameFactory = new MetaDataFrameFactory();
-
- IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
- IBTreeInteriorFrame interiorFrame = interiorFrameFactory.getFrame();
- IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
-
- BTree btree = new BTree(bufferCache, interiorFrameFactory, leafFrameFactory, cmp);
- btree.create(fileId, leafFrame, metaFrame);
- btree.open(fileId);
-
- Random rnd = new Random();
- rnd.setSeed(50);
-
- ByteBuffer frame = ctx.allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
- ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
- DataOutput dos = tb.getDataOutput();
-
- ISerializerDeserializer[] btreeSerde = { UTF8StringSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE };
- RecordDescriptor btreeRecDesc = new RecordDescriptor(btreeSerde);
- IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), btreeRecDesc);
- accessor.reset(frame);
- FrameTupleReference tuple = new FrameTupleReference();
-
- List<String> tokens = new ArrayList<String>();
- tokens.add("computer");
- tokens.add("hyracks");
- tokens.add("fast");
- tokens.add("university");
- tokens.add("science");
- tokens.add("major");
-
- int maxId = 10000;
- int addProb = 0;
- int addProbStep = 2;
-
- BTreeOpContext opCtx = btree.createOpContext(BTreeOp.BTO_INSERT, leafFrame, interiorFrame, metaFrame);
-
- for (int i = 0; i < tokens.size(); i++) {
-
- addProb += addProbStep;
- for (int j = 0; j < maxId; j++) {
- if ((Math.abs(rnd.nextInt()) % addProb) == 0) {
- tb.reset();
- UTF8StringSerializerDeserializer.INSTANCE.serialize(tokens.get(i), dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(j, dos);
- tb.addFieldEndOffset();
-
- appender.reset(frame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
- tuple.reset(accessor, 0);
-
- try {
- btree.insert(tuple, opCtx);
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
- }
- }
-
- int numPages = btree.getMaxPage(metaFrame);
- System.out.println("NUMPAGES: " + numPages);
-
- // build query as tuple reference
- ISerializerDeserializer[] querySerde = { UTF8StringSerializerDeserializer.INSTANCE };
- RecordDescriptor queryRecDesc = new RecordDescriptor(querySerde);
-
- FrameTupleAppender queryAppender = new FrameTupleAppender(ctx.getFrameSize());
- ArrayTupleBuilder queryTb = new ArrayTupleBuilder(querySerde.length);
- DataOutput queryDos = queryTb.getDataOutput();
-
- IFrameTupleAccessor queryAccessor = new FrameTupleAccessor(ctx.getFrameSize(), queryRecDesc);
- queryAccessor.reset(frame);
- FrameTupleReference queryTuple = new FrameTupleReference();
-
- String query = "computer hyracks fast";
- char queryDelimiter = ' ';
- IBinaryTokenizer queryTokenizer = new DelimitedUTF8StringBinaryTokenizer(queryDelimiter);
-
- queryTb.reset();
- UTF8StringSerializerDeserializer.INSTANCE.serialize(query, queryDos);
- queryTb.addFieldEndOffset();
-
- queryAppender.reset(frame, true);
- queryAppender.append(queryTb.getFieldEndOffsets(), queryTb.getByteArray(), 0, queryTb.getSize());
- queryTuple.reset(queryAccessor, 0);
-
- int numKeyFields = 1;
- int numValueFields = 1;
- ISerializerDeserializer[] resultSerde = new ISerializerDeserializer[numValueFields];
- for (int i = 0; i < numValueFields; i++) {
- resultSerde[i] = btreeSerde[numKeyFields + i];
- }
- RecordDescriptor resultRecDesc = new RecordDescriptor(resultSerde);
- FrameTupleAccessor resultAccessor = new FrameTupleAccessor(ctx.getFrameSize(), resultRecDesc);
- FrameTupleReference resultTuple = new FrameTupleReference();
-
- SimpleConjunctiveSearcher searcher = new SimpleConjunctiveSearcher(ctx, btree, btreeRecDesc, queryTokenizer,
- numKeyFields, numValueFields);
-
- long timeStart = System.currentTimeMillis();
- searcher.search(queryTuple, 0);
- long timeEnd = System.currentTimeMillis();
- System.out.println("SEARCH TIME: " + (timeEnd - timeStart) + "ms");
-
- // System.out.println("INTERSECTION RESULTS");
- IInvertedIndexResultCursor resultCursor = searcher.getResultCursor();
- while (resultCursor.hasNext()) {
- resultCursor.next();
- resultAccessor.reset(resultCursor.getBuffer());
- for (int i = 0; i < resultAccessor.getTupleCount(); i++) {
- resultTuple.reset(resultAccessor, i);
- for (int j = 0; j < resultTuple.getFieldCount(); j++) {
- ByteArrayInputStream inStream = new ByteArrayInputStream(resultTuple.getFieldData(j), resultTuple
- .getFieldStart(j), resultTuple.getFieldLength(j));
- DataInput dataIn = new DataInputStream(inStream);
- Object o = resultSerde[j].deserialize(dataIn);
- System.out.print(o + " ");
- }
- System.out.println();
- }
- }
-
- /*
- * IBinaryComparator[] searchCmps = new IBinaryComparator[1];
- * searchCmps[0] =
- * UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
- * MultiComparator searchCmp = new MultiComparator(typeTraits,
- * searchCmps);
- *
- * // ordered scan IBTreeCursor scanCursor = new
- * RangeSearchCursor(leafFrame); RangePredicate nullPred = new
- * RangePredicate(true, null, null, true, true, null); BTreeOpContext
- * searchOpCtx = btree.createOpContext(BTreeOp.BTO_SEARCH, leafFrame,
- * interiorFrame, metaFrame); btree.search(scanCursor, nullPred,
- * searchOpCtx);
- *
- * try { while (scanCursor.hasNext()) { scanCursor.next();
- * ITupleReference frameTuple = scanCursor.getTuple(); String rec =
- * cmp.printTuple(frameTuple, btreeSerde); System.out.println(rec); } }
- * catch (Exception e) { e.printStackTrace(); } finally {
- * scanCursor.close(); }
- */
-
- btree.close();
- bufferCache.closeFile(fileId);
- bufferCache.close();
- }
-}
diff --git a/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/TokenizerTest.java b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/TokenizerTest.java
deleted file mode 100644
index 47c75cf..0000000
--- a/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/TokenizerTest.java
+++ /dev/null
@@ -1,194 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
-
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.util.ArrayList;
-import java.util.Random;
-
-import org.junit.Assert;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ByteArrayAccessibleOutputStream;
-import edu.uci.ics.hyracks.dataflow.common.data.hash.UTF8StringBinaryHashFunctionFactory;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-
-public class TokenizerTest {
-
- // testing DelimitedUTF8StringBinaryTokenizer
- @Test
- public void test01() throws Exception {
- Random rnd = new Random(50);
-
- int numDocs = 100;
- int maxWords = 1000;
- int maxWordLength = 50;
- char delimiter = ' ';
-
- DelimitedUTF8StringBinaryTokenizer tok = new DelimitedUTF8StringBinaryTokenizer(delimiter);
-
- // create a bunch of documents
- for (int i = 0; i < numDocs; i++) {
-
- // create a single document with a bunch of words
- int words = (Math.abs(rnd.nextInt()) % maxWords) + 1;
- StringBuilder strBuilder = new StringBuilder();
- for (int j = 0; j < words; j++) {
- int len = (Math.abs(rnd.nextInt()) % maxWordLength) + 1;
- String s = randomString(len, rnd);
- strBuilder.append(s);
- if (j < words - 1)
- strBuilder.append(delimiter);
- }
-
- String doc = strBuilder.toString();
-
- // serialize document into baaos
- ByteArrayAccessibleOutputStream baaos = new ByteArrayAccessibleOutputStream();
- DataOutputStream dos = new DataOutputStream(baaos);
- UTF8StringSerializerDeserializer.INSTANCE.serialize(doc, dos);
- byte[] data = baaos.toByteArray();
-
- // use binary tokenizer and compare with Java tokenizer
- String[] cmpTokens = doc.split(new String(new char[] { delimiter }));
- int cmpCounter = 0;
-
- tok.reset(data, 0, data.length);
- while (tok.hasNext()) {
- tok.next();
-
- // write token to outputstream
- ByteArrayAccessibleOutputStream baaosWrite = new ByteArrayAccessibleOutputStream();
- DataOutputStream dosWrite = new DataOutputStream(baaosWrite);
- tok.writeToken(dosWrite);
-
- // deserialize token to get string object
- ByteArrayInputStream inStream = new ByteArrayInputStream(baaosWrite.toByteArray());
- DataInput dataIn = new DataInputStream(inStream);
- String s = UTF8StringSerializerDeserializer.INSTANCE.deserialize(dataIn);
-
- Assert.assertEquals(s, cmpTokens[cmpCounter++]);
- }
- }
- }
-
- // testing HashedQGramUTF8StringBinaryTokenizer
- @Test
- public void test02() throws Exception {
- Random rnd = new Random(50);
-
- int numStrings = 1000;
- int maxStrLen = 100;
- int minQ = 2;
- int maxQ = 10;
-
- // we test the correctness of HashedQGramUTF8StringBinaryTokenizer as
- // follows:
- // 1.1. tokenize the string into q-gram strings
- // 1.2. serialize q-gram strings into bytes
- // 1.3. compute hashed gram with UTF8StringBinaryHashFunctionFactory
- // 2.1. serialize string into bytes
- // 2.2. tokenize serialized string into hashed q-grams
- // 2.3. test whether hashed grams from 1.3. and 2.3. are equal
- for (int i = 0; i < numStrings; i++) {
- int q = (Math.abs(rnd.nextInt()) % (maxQ - minQ)) + minQ;
- int strLen = (Math.abs(rnd.nextInt()) % (maxStrLen - q)) + q;
- String str = randomString(strLen, rnd);
-
- // randomly choose pre and postfixing
- boolean prePost = false;
- if (Math.abs(rnd.nextInt()) % 2 == 0)
- prePost = true;
-
- HashedQGramUTF8StringBinaryTokenizer qgramTok = new HashedQGramUTF8StringBinaryTokenizer(q, prePost);
-
- String extendedString = str;
- if (prePost) {
- // pre and postfix string
- StringBuilder strBuilder = new StringBuilder();
- for (int j = 0; j < q - 1; j++)
- strBuilder.append(qgramTok.getPreChar());
- strBuilder.append(str);
- for (int j = 0; j < q - 1; j++)
- strBuilder.append(qgramTok.getPostChar());
- extendedString = strBuilder.toString();
- }
-
- // generate q-grams in deserialized form
- ArrayList<String> javaGrams = new ArrayList<String>();
- for (int j = 0; j < extendedString.length() - q + 1; j++) {
- javaGrams.add(extendedString.substring(j, j + q));
- }
-
- // serialize string for use in binary gram tokenizer
- ByteArrayAccessibleOutputStream baaos = new ByteArrayAccessibleOutputStream();
- DataOutputStream dos = new DataOutputStream(baaos);
- UTF8StringSerializerDeserializer.INSTANCE.serialize(str, dos);
- byte[] data = baaos.toByteArray();
-
- qgramTok.reset(data, 0, data.length);
-
- int counter = 0;
- while (qgramTok.hasNext()) {
- qgramTok.next();
-
- // write token to outputstream
- ByteArrayAccessibleOutputStream baaosWrite = new ByteArrayAccessibleOutputStream();
- DataOutputStream dosWrite = new DataOutputStream(baaosWrite);
- qgramTok.writeToken(dosWrite);
-
- // deserialize token to get hashed gram
- ByteArrayInputStream inStream = new ByteArrayInputStream(baaosWrite.toByteArray());
- DataInput dataIn = new DataInputStream(inStream);
- Integer binHashedGram = IntegerSerializerDeserializer.INSTANCE.deserialize(dataIn);
-
- // create hashed gram to test against
- ByteArrayAccessibleOutputStream baaosCmp = new ByteArrayAccessibleOutputStream();
- DataOutputStream dosCmp = new DataOutputStream(baaosCmp);
- UTF8StringSerializerDeserializer.INSTANCE.serialize(javaGrams.get(counter), dosCmp);
-
- IBinaryHashFunction strHasher = UTF8StringBinaryHashFunctionFactory.INSTANCE.createBinaryHashFunction();
- byte[] cmpData = baaosCmp.toByteArray();
- int cmpHash = strHasher.hash(cmpData, 0, cmpData.length);
-
- Assert.assertEquals(binHashedGram.intValue(), cmpHash);
-
- counter++;
- }
- }
- }
-
- public static String randomString(int length, Random random) {
- int maxAttempts = 1000;
- int count = 0;
- while (count < maxAttempts) {
- String s = Long.toHexString(Double.doubleToLongBits(random.nextDouble()));
- StringBuilder strBuilder = new StringBuilder();
- for (int i = 0; i < s.length() && i < length; i++) {
- strBuilder.append(s.charAt(Math.abs(random.nextInt()) % s.length()));
- }
- if (strBuilder.length() > 0)
- return strBuilder.toString();
- count++;
- }
- return "abc";
- }
-}
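
For context on what the deleted q-gram test covered: a q-gram tokenizer emits every length-q substring of the input, optionally padded with q-1 pre/post characters so that prefixes and suffixes also form full grams. A rough standalone sketch of that expansion, using '#' and '$' as assumed padding characters (the real tokenizer exposes its own via getPreChar()/getPostChar()):

    import java.util.ArrayList;
    import java.util.List;

    public class QGramSketch {
        // Enumerate q-grams of s, optionally padding with q-1 pre/post characters.
        static List<String> qgrams(String s, int q, boolean prePost) {
            StringBuilder sb = new StringBuilder();
            if (prePost) {
                for (int i = 0; i < q - 1; i++) sb.append('#'); // assumed pre char
            }
            sb.append(s);
            if (prePost) {
                for (int i = 0; i < q - 1; i++) sb.append('$'); // assumed post char
            }
            String ext = sb.toString();
            List<String> grams = new ArrayList<String>();
            for (int i = 0; i + q <= ext.length(); i++) {
                grams.add(ext.substring(i, i + q));
            }
            return grams;
        }

        public static void main(String[] args) {
            // "abc" with q=2 and padding -> [#a, ab, bc, c$]
            System.out.println(qgrams("abc", 2, true));
        }
    }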
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/.classpath b/hyracks-tests/hyracks-storage-am-rtree-test/.classpath
new file mode 100644
index 0000000..f2cc5f7
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-rtree-test/.classpath
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" output="target/test-classes" path="src/test/java"/>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
+ <classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
+ <classpathentry kind="output" path="target/classes"/>
+</classpath>
diff --git a/hyracks-storage-am-invertedindex/.project b/hyracks-tests/hyracks-storage-am-rtree-test/.project
similarity index 91%
copy from hyracks-storage-am-invertedindex/.project
copy to hyracks-tests/hyracks-storage-am-rtree-test/.project
index 9303fb7..ea7e36b 100644
--- a/hyracks-storage-am-invertedindex/.project
+++ b/hyracks-tests/hyracks-storage-am-rtree-test/.project
@@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
- <name>hyracks-storage-am-invertedindex</name>
+ <name>hyracks-storage-am-rtree-test</name>
<comment></comment>
<projects>
</projects>
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/.settings/org.eclipse.jdt.core.prefs b/hyracks-tests/hyracks-storage-am-rtree-test/.settings/org.eclipse.jdt.core.prefs
new file mode 100644
index 0000000..375e12e
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-rtree-test/.settings/org.eclipse.jdt.core.prefs
@@ -0,0 +1,6 @@
+#Fri May 20 19:34:07 PDT 2011
+eclipse.preferences.version=1
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
+org.eclipse.jdt.core.compiler.compliance=1.6
+org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
+org.eclipse.jdt.core.compiler.source=1.6
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/.settings/org.maven.ide.eclipse.prefs b/hyracks-tests/hyracks-storage-am-rtree-test/.settings/org.maven.ide.eclipse.prefs
new file mode 100644
index 0000000..99b89a6
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-rtree-test/.settings/org.maven.ide.eclipse.prefs
@@ -0,0 +1,9 @@
+#Thu Jan 06 11:27:16 PST 2011
+activeProfiles=
+eclipse.preferences.version=1
+fullBuildGoals=process-test-resources
+includeModules=false
+resolveWorkspaceProjects=true
+resourceFilterGoals=process-resources resources\:testResources
+skipCompilerPlugin=true
+version=1
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml b/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml
new file mode 100644
index 0000000..6673481a
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml
@@ -0,0 +1,55 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-storage-am-rtree-test</artifactId>
+ <version>0.1.7-SNAPSHOT</version>
+
+ <parent>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-tests</artifactId>
+ <version>0.1.7-SNAPSHOT</version>
+ </parent>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <version>2.0.2</version>
+ <configuration>
+ <source>1.6</source>
+ <target>1.6</target>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+ <dependencies>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>4.8.1</version>
+ <type>jar</type>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-control-nc</artifactId>
+ <version>0.1.7-SNAPSHOT</version>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-storage-am-rtree</artifactId>
+ <version>0.1.7-SNAPSHOT</version>
+ <type>jar</type>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-test-support</artifactId>
+ <version>0.1.7-SNAPSHOT</version>
+ <type>jar</type>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+</project>
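
Assuming the sibling hyracks modules are available in the local repository (for example after an mvn install from the project root) and the new module is listed in the hyracks-tests parent POM, the R-tree tests should be runnable on their own with standard Maven commands:

    cd hyracks-tests/hyracks-storage-am-rtree-test
    mvn clean test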
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTest.java b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTest.java
new file mode 100644
index 0000000..662d803
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTest.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+import org.junit.AfterClass;
+
+public abstract class AbstractRTreeTest {
+
+ protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
+ protected final static String tmpDir = System.getProperty("java.io.tmpdir");
+ protected final static String sep = System.getProperty("file.separator");
+ protected final static String fileName = tmpDir + sep + simpleDateFormat.format(new Date());
+
+ protected void print(String str) {
+ System.out.print(str);
+ }
+
+ @AfterClass
+ public static void cleanup() throws Exception {
+ File f = new File(fileName);
+ f.deleteOnExit();
+ }
+}
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTest.java b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTest.java
new file mode 100644
index 0000000..1ed8dd1
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTest.java
@@ -0,0 +1,570 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import java.io.DataOutput;
+import java.io.File;
+import java.nio.ByteBuffer;
+import java.util.Random;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.TypeTrait;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.DoubleBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.utility.TreeIndexStats;
+import edu.uci.ics.hyracks.storage.am.common.utility.TreeIndexStatsGatherer;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeFrame;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeOpContext;
+import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestUtils;
+
+public class RTreeTest extends AbstractRTreeTest {
+
+ private static final int PAGE_SIZE = 8192;
+ private static final int NUM_PAGES = 20;
+ private static final int MAX_OPEN_FILES = 20;
+ private static final int HYRACKS_FRAME_SIZE = 128;
+ private IHyracksStageletContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
+
+ // create an R-tree of two dimensions
+ // fill the R-tree with random values using insertions
+ // perform a disk-order scan
+ @Test
+ public void test01() throws Exception {
+
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
+
+ // declare keys
+ int keyFieldCount = 4;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = DoubleBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ cmps[1] = cmps[0];
+ cmps[2] = cmps[0];
+ cmps[3] = cmps[0];
+
+ // declare tuple fields
+ int fieldCount = 7;
+ ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ typeTraits[0] = new TypeTrait(8);
+ typeTraits[1] = new TypeTrait(8);
+ typeTraits[2] = new TypeTrait(8);
+ typeTraits[3] = new TypeTrait(8);
+ typeTraits[4] = new TypeTrait(8);
+ typeTraits[5] = new TypeTrait(4);
+ typeTraits[6] = new TypeTrait(8);
+
+ MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+
+ RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(typeTraits);
+
+ ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(tupleWriterFactory,
+ keyFieldCount);
+ ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory, keyFieldCount);
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+ ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
+
+ IRTreeFrame interiorFrame = (IRTreeFrame) interiorFrameFactory.createFrame();
+ IRTreeFrame leafFrame = (IRTreeFrame) leafFrameFactory.createFrame();
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
+
+ RTree rtree = new RTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
+ rtree.create(fileId, leafFrame, metaFrame);
+ rtree.open(fileId);
+
+ ByteBuffer hyracksFrame = ctx.allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+ DataOutput dos = tb.getDataOutput();
+
+ @SuppressWarnings("rawtypes")
+ ISerializerDeserializer[] recDescSers = { DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+ accessor.reset(hyracksFrame);
+ FrameTupleReference tuple = new FrameTupleReference();
+
+ RTreeOpContext insertOpCtx = rtree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
+
+ Random rnd = new Random();
+ rnd.setSeed(50);
+
+ Random rnd2 = new Random();
+ rnd2.setSeed(50);
+ for (int i = 0; i < 10000; i++) {
+
+ double p1x = rnd.nextDouble();
+ double p1y = rnd.nextDouble();
+ double p2x = rnd.nextDouble();
+ double p2y = rnd.nextDouble();
+
+ double pk1 = rnd2.nextDouble();
+ int pk2 = rnd2.nextInt();
+ double pk3 = rnd2.nextDouble();
+
+ tb.reset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1x, p2x), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1y, p2y), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1x, p2x), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1y, p2y), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(pk1, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(pk2, dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(pk3, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(hyracksFrame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ if (i % 1000 == 0) {
+ print("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " " + Math.max(p1x, p2x)
+ + " " + Math.max(p1y, p2y) + "\n");
+ }
+
+ try {
+ rtree.insert(tuple, insertOpCtx);
+ } catch (TreeIndexException e) {
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ // rtree.printTree(leafFrame, interiorFrame, recDescSers);
+ // System.out.println();
+
+ String rtreeStats = rtree.printStats();
+ print(rtreeStats);
+
+ // disk-order scan
+ print("DISK-ORDER SCAN:\n");
+ TreeDiskOrderScanCursor diskOrderCursor = new TreeDiskOrderScanCursor(leafFrame);
+ RTreeOpContext diskOrderScanOpCtx = rtree.createOpContext(IndexOp.DISKORDERSCAN, leafFrame, null, null);
+ rtree.diskOrderScan(diskOrderCursor, leafFrame, metaFrame, diskOrderScanOpCtx);
+ try {
+ while (diskOrderCursor.hasNext()) {
+ diskOrderCursor.next();
+ ITupleReference frameTuple = diskOrderCursor.getTuple();
+ String rec = cmp.printTuple(frameTuple, recDescSers);
+ print(rec + "\n");
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ diskOrderCursor.close();
+ }
+
+ TreeIndexStatsGatherer statsGatherer = new TreeIndexStatsGatherer(bufferCache, freePageManager, fileId,
+ rtree.getRootPageId());
+ TreeIndexStats stats = statsGatherer.gatherStats(leafFrame, interiorFrame, metaFrame);
+ String string = stats.toString();
+ System.out.println(string);
+
+ rtree.close();
+ bufferCache.closeFile(fileId);
+ bufferCache.close();
+
+ }
+
+ // create an R-tree of two dimensions
+ // fill the R-tree with random values using insertions
+ // and then delete all the tuples, which results in an empty R-tree
+ @Test
+ public void test02() throws Exception {
+
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
+
+ // declare keys
+ int keyFieldCount = 4;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = DoubleBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ cmps[1] = cmps[0];
+ cmps[2] = cmps[0];
+ cmps[3] = cmps[0];
+
+ // declare tuple fields
+ int fieldCount = 7;
+ ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ typeTraits[0] = new TypeTrait(8);
+ typeTraits[1] = new TypeTrait(8);
+ typeTraits[2] = new TypeTrait(8);
+ typeTraits[3] = new TypeTrait(8);
+ typeTraits[4] = new TypeTrait(8);
+ typeTraits[5] = new TypeTrait(4);
+ typeTraits[6] = new TypeTrait(8);
+
+ MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+
+ RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(typeTraits);
+
+ ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(tupleWriterFactory,
+ keyFieldCount);
+ ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory, keyFieldCount);
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+ ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
+
+ IRTreeFrame interiorFrame = (IRTreeFrame) interiorFrameFactory.createFrame();
+ IRTreeFrame leafFrame = (IRTreeFrame) leafFrameFactory.createFrame();
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
+
+ RTree rtree = new RTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
+ rtree.create(fileId, leafFrame, metaFrame);
+ rtree.open(fileId);
+
+ ByteBuffer hyracksFrame = ctx.allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+ DataOutput dos = tb.getDataOutput();
+
+ @SuppressWarnings("rawtypes")
+ ISerializerDeserializer[] recDescSers = { DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+ accessor.reset(hyracksFrame);
+ FrameTupleReference tuple = new FrameTupleReference();
+
+ RTreeOpContext insertOpCtx = rtree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
+
+ Random rnd = new Random();
+ rnd.setSeed(50);
+
+ for (int i = 0; i < 10000; i++) {
+
+ double p1x = rnd.nextDouble();
+ double p1y = rnd.nextDouble();
+ double p2x = rnd.nextDouble();
+ double p2y = rnd.nextDouble();
+
+ double pk1 = rnd.nextDouble();
+ int pk2 = rnd.nextInt();
+ double pk3 = rnd.nextDouble();
+
+ tb.reset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1x, p2x), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1y, p2y), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1x, p2x), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1y, p2y), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(pk1, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(pk2, dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(pk3, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(hyracksFrame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ if (i % 1000 == 0) {
+ print("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " " + Math.max(p1x, p2x)
+ + " " + Math.max(p1y, p2y) + "\n");
+ }
+
+ try {
+ rtree.insert(tuple, insertOpCtx);
+ } catch (TreeIndexException e) {
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ // rtree.printTree(leafFrame, interiorFrame, recDescSers);
+ // System.out.println();
+
+ String rtreeStats = rtree.printStats();
+ print(rtreeStats);
+
+ RTreeOpContext deleteOpCtx = rtree.createOpContext(IndexOp.DELETE, leafFrame, interiorFrame, metaFrame);
+ rnd.setSeed(50);
+ for (int i = 0; i < 10000; i++) {
+
+ double p1x = rnd.nextDouble();
+ double p1y = rnd.nextDouble();
+ double p2x = rnd.nextDouble();
+ double p2y = rnd.nextDouble();
+
+ double pk1 = rnd.nextDouble();
+ int pk2 = rnd.nextInt();
+ double pk3 = rnd.nextDouble();
+
+ tb.reset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1x, p2x), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1y, p2y), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1x, p2x), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1y, p2y), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(pk1, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(pk2, dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(pk3, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(hyracksFrame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ if (i % 1000 == 0) {
+ print("DELETING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " " + Math.max(p1x, p2x)
+ + " " + Math.max(p1y, p2y) + "\n");
+ }
+
+ try {
+ rtree.delete(tuple, deleteOpCtx);
+
+ } catch (TreeIndexException e) {
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ TreeIndexStatsGatherer statsGatherer = new TreeIndexStatsGatherer(bufferCache, freePageManager, fileId,
+ rtree.getRootPageId());
+ TreeIndexStats stats = statsGatherer.gatherStats(leafFrame, interiorFrame, metaFrame);
+ String string = stats.toString();
+ System.out.println(string);
+
+ rtree.close();
+ bufferCache.closeFile(fileId);
+ bufferCache.close();
+
+ }
+
+ // create an R-tree of three dimensions
+ // fill the R-tree with random values using insertions
+ // perform a disk-order scan
+ @Test
+ public void test03() throws Exception {
+
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
+
+ // declare keys
+ int keyFieldCount = 6;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = DoubleBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ cmps[1] = cmps[0];
+ cmps[2] = cmps[0];
+ cmps[3] = cmps[0];
+ cmps[4] = cmps[0];
+ cmps[5] = cmps[0];
+
+ // declare tuple fields
+ int fieldCount = 9;
+ ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ typeTraits[0] = new TypeTrait(8);
+ typeTraits[1] = new TypeTrait(8);
+ typeTraits[2] = new TypeTrait(8);
+ typeTraits[3] = new TypeTrait(8);
+ typeTraits[4] = new TypeTrait(8);
+ typeTraits[5] = new TypeTrait(8);
+ typeTraits[6] = new TypeTrait(8);
+ typeTraits[7] = new TypeTrait(4);
+ typeTraits[8] = new TypeTrait(8);
+
+ MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+
+ RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(typeTraits);
+
+ ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(tupleWriterFactory,
+ keyFieldCount);
+ ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory, keyFieldCount);
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+ ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
+
+ IRTreeFrame interiorFrame = (IRTreeFrame) interiorFrameFactory.createFrame();
+ IRTreeFrame leafFrame = (IRTreeFrame) leafFrameFactory.createFrame();
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
+
+ RTree rtree = new RTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
+ rtree.create(fileId, leafFrame, metaFrame);
+ rtree.open(fileId);
+
+ ByteBuffer hyracksFrame = ctx.allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+ DataOutput dos = tb.getDataOutput();
+
+ @SuppressWarnings("rawtypes")
+ ISerializerDeserializer[] recDescSers = { DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+ accessor.reset(hyracksFrame);
+ FrameTupleReference tuple = new FrameTupleReference();
+
+ RTreeOpContext insertOpCtx = rtree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
+
+ Random rnd = new Random();
+ rnd.setSeed(50);
+
+ for (int i = 0; i < 10000; i++) {
+
+ double p1x = rnd.nextDouble();
+ double p1y = rnd.nextDouble();
+ double p1z = rnd.nextDouble();
+ double p2x = rnd.nextDouble();
+ double p2y = rnd.nextDouble();
+ double p2z = rnd.nextDouble();
+
+ double pk1 = rnd.nextDouble();
+ int pk2 = rnd.nextInt();
+ double pk3 = rnd.nextDouble();
+
+ tb.reset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1x, p2x), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1y, p2y), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1z, p2z), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1x, p2x), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1y, p2y), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1z, p2z), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(pk1, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(pk2, dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(pk3, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(hyracksFrame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ if (i % 1000 == 0) {
+ print("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " " + Math.min(p1z, p2z)
+ + " " + " " + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y) + " " + Math.max(p1z, p2z) + "\n");
+ }
+
+ try {
+ rtree.insert(tuple, insertOpCtx);
+ } catch (TreeIndexException e) {
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ // rtree.printTree(leafFrame, interiorFrame, recDescSers);
+ // System.out.println();
+
+ String rtreeStats = rtree.printStats();
+ print(rtreeStats);
+
+ // disk-order scan
+ print("DISK-ORDER SCAN:\n");
+ TreeDiskOrderScanCursor diskOrderCursor = new TreeDiskOrderScanCursor(leafFrame);
+ RTreeOpContext diskOrderScanOpCtx = rtree.createOpContext(IndexOp.DISKORDERSCAN, leafFrame, null, null);
+ rtree.diskOrderScan(diskOrderCursor, leafFrame, metaFrame, diskOrderScanOpCtx);
+ try {
+ while (diskOrderCursor.hasNext()) {
+ diskOrderCursor.next();
+ ITupleReference frameTuple = diskOrderCursor.getTuple();
+ String rec = cmp.printTuple(frameTuple, recDescSers);
+ print(rec + "\n");
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ diskOrderCursor.close();
+ }
+
+ TreeIndexStatsGatherer statsGatherer = new TreeIndexStatsGatherer(bufferCache, freePageManager, fileId,
+ rtree.getRootPageId());
+ TreeIndexStats stats = statsGatherer.gatherStats(leafFrame, interiorFrame, metaFrame);
+ String string = stats.toString();
+ System.out.println(string);
+
+ rtree.close();
+ bufferCache.closeFile(fileId);
+ bufferCache.close();
+
+ }
+}
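
The two-dimensional tests above (test01/test02) repeat the same tuple-building step: normalize the two random corner points into an MBR (minimums first, then maximums) and append the payload fields. A hedged sketch of that step factored into a helper, reusing only the builder and serializer calls already shown in this file (it assumes the same imports and would live inside the test class):

    // Builds a 2-D MBR tuple (minX, minY, maxX, maxY, pk1, pk2, pk3), mirroring the inline code above.
    private static void buildMBRTuple(ArrayTupleBuilder tb, double p1x, double p1y, double p2x, double p2y,
            double pk1, int pk2, double pk3) throws Exception {
        DataOutput dos = tb.getDataOutput();
        tb.reset();
        DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1x, p2x), dos); // lower-left x
        tb.addFieldEndOffset();
        DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1y, p2y), dos); // lower-left y
        tb.addFieldEndOffset();
        DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1x, p2x), dos); // upper-right x
        tb.addFieldEndOffset();
        DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1y, p2y), dos); // upper-right y
        tb.addFieldEndOffset();
        DoubleSerializerDeserializer.INSTANCE.serialize(pk1, dos); // payload fields
        tb.addFieldEndOffset();
        IntegerSerializerDeserializer.INSTANCE.serialize(pk2, dos);
        tb.addFieldEndOffset();
        DoubleSerializerDeserializer.INSTANCE.serialize(pk3, dos);
        tb.addFieldEndOffset();
    }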
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/SearchCursorTest.java b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/SearchCursorTest.java
new file mode 100644
index 0000000..195213b
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/SearchCursorTest.java
@@ -0,0 +1,242 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.File;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Random;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.TypeTrait;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.DoubleBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeOpContext;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.SearchPredicate;
+import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestUtils;
+
+public class SearchCursorTest extends AbstractRTreeTest {
+ private static final int PAGE_SIZE = 256;
+ private static final int NUM_PAGES = 10;
+ private static final int MAX_OPEN_FILES = 10;
+ private static final int HYRACKS_FRAME_SIZE = 128;
+ private IHyracksStageletContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
+
+ // create an R-tree of two dimensions
+ // fill the R-tree with random values using insertions
+ // and then perform a range search
+ @Test
+ public void searchCursorTest() throws Exception {
+
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
+
+ // declare keys
+ int keyFieldCount = 4;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = DoubleBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ cmps[1] = cmps[0];
+ cmps[2] = cmps[0];
+ cmps[3] = cmps[0];
+
+ // declare tuple fields
+ int fieldCount = 5;
+ ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ typeTraits[0] = new TypeTrait(8);
+ typeTraits[1] = new TypeTrait(8);
+ typeTraits[2] = new TypeTrait(8);
+ typeTraits[3] = new TypeTrait(8);
+ typeTraits[4] = new TypeTrait(4);
+
+ MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+
+ RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(typeTraits);
+
+ ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(tupleWriterFactory,
+ keyFieldCount);
+ ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory, keyFieldCount);
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+ ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
+
+ IRTreeInteriorFrame interiorFrame = (IRTreeInteriorFrame) interiorFrameFactory.createFrame();
+ IRTreeLeafFrame leafFrame = (IRTreeLeafFrame) leafFrameFactory.createFrame();
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
+
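+ // create an empty R-tree in the test file and open it for insertions and searches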
+ RTree rtree = new RTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
+ rtree.create(fileId, leafFrame, metaFrame);
+ rtree.open(fileId);
+
+ ByteBuffer hyracksFrame = ctx.allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+ DataOutput dos = tb.getDataOutput();
+
+ @SuppressWarnings("rawtypes")
+ ISerializerDeserializer[] recDescSers = { DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+ accessor.reset(hyracksFrame);
+ FrameTupleReference tuple = new FrameTupleReference();
+
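+ // a single operation context is created once and reused for all insertions below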
+ RTreeOpContext insertOpCtx = rtree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
+
+ Random rnd = new Random();
+ rnd.setSeed(50);
+ for (int i = 0; i < 10000; i++) {
+
+ double p1x = rnd.nextDouble();
+ double p1y = rnd.nextDouble();
+ double p2x = rnd.nextDouble();
+ double p2y = rnd.nextDouble();
+
+ int pk = rnd.nextInt();
+
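+ // normalize the rectangle so the key order is (xmin, ymin, xmax, ymax), followed by the integer primary key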
+ tb.reset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1x, p2x), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1y, p2y), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1x, p2x), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1y, p2y), dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(pk, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(hyracksFrame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ if (i % 1000 == 0) {
+ print("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " " + Math.max(p1x, p2x)
+ + " " + Math.max(p1y, p2y) + "\n");
+ }
+
+ try {
+ rtree.insert(tuple, insertOpCtx);
+ } catch (TreeIndexException e) {
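+ // a TreeIndexException on insert is not fatal for this test; the tuple is simply skipped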
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+
+ }
+
+ for (int i = 0; i < 50; i++) {
+
+ double p1x = rnd.nextDouble();
+ double p1y = rnd.nextDouble();
+ double p2x = rnd.nextDouble();
+ double p2y = rnd.nextDouble();
+
+ int pk = rnd.nextInt();
+
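+ // build a random query rectangle, normalized the same way as the inserted rectangles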
+ tb.reset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1x, p2x), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1y, p2y), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1x, p2x), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1y, p2y), dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(pk, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(hyracksFrame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ print(i + " Searching for: " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " " + Math.max(p1x, p2x)
+ + " " + Math.max(p1y, p2y) + "\n");
+
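+ // run a range search using the query rectangle as the search predicate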
+ ITreeIndexCursor searchCursor = new RTreeSearchCursor(interiorFrame, leafFrame);
+ SearchPredicate searchPredicate = new SearchPredicate(tuple, cmp);
+
+ RTreeOpContext searchOpCtx = rtree.createOpContext(IndexOp.SEARCH, leafFrame, interiorFrame, metaFrame);
+ rtree.search(searchCursor, searchPredicate, searchOpCtx);
+
+ ArrayList<Integer> results = new ArrayList<Integer>();
+ try {
+ while (searchCursor.hasNext()) {
+ searchCursor.next();
+ ITupleReference frameTuple = searchCursor.getTuple();
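+ // field 4 of a matching tuple holds its integer primary key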
+ ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(4),
+ frameTuple.getFieldStart(4), frameTuple.getFieldLength(4));
+ DataInput dataIn = new DataInputStream(inStream);
+ Integer res = IntegerSerializerDeserializer.INSTANCE.deserialize(dataIn);
+ results.add(res);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ searchCursor.close();
+ }
+
+ System.out.println("There are " + results.size() + " objects that satisfy the query");
+ }
+
+ rtree.close();
+ bufferCache.closeFile(fileId);
+ bufferCache.close();
+ }
+}
diff --git a/hyracks-tests/hyracks-storage-common-test/pom.xml b/hyracks-tests/hyracks-storage-common-test/pom.xml
new file mode 100644
index 0000000..1468c78
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-common-test/pom.xml
@@ -0,0 +1,54 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-storage-common-test</artifactId>
+ <version>0.1.7-SNAPSHOT</version>
+
+ <parent>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-tests</artifactId>
+ <version>0.1.7-SNAPSHOT</version>
+ </parent>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <version>2.0.2</version>
+ <configuration>
+ <source>1.6</source>
+ <target>1.6</target>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+ <dependencies>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>4.8.1</version>
+ <type>jar</type>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-control-nc</artifactId>
+ <version>0.1.7-SNAPSHOT</version>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-storage-common</artifactId>
+ <version>0.1.7-SNAPSHOT</version>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-test-support</artifactId>
+ <version>0.1.7-SNAPSHOT</version>
+ <type>jar</type>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+</project>
diff --git a/hyracks-tests/hyracks-storage-common-test/src/test/java/edu/uci/ics/hyracks/storage/common/BufferCacheTest.java b/hyracks-tests/hyracks-storage-common-test/src/test/java/edu/uci/ics/hyracks/storage/common/BufferCacheTest.java
new file mode 100644
index 0000000..680fd3b
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-common-test/src/test/java/edu/uci/ics/hyracks/storage/common/BufferCacheTest.java
@@ -0,0 +1,311 @@
+package edu.uci.ics.hyracks.storage.common;
+
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestUtils;
+
+public class BufferCacheTest {
+ protected static final List<String> openedFiles = new ArrayList<String>();
+ protected static final SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
+ protected static final String tmpDir = System.getProperty("java.io.tmpdir");
+ protected static final String sep = System.getProperty("file.separator");
+
+ private static final int PAGE_SIZE = 256;
+ private static final int NUM_PAGES = 10;
+ private static final int MAX_OPEN_FILES = 20;
+ private static final int HYRACKS_FRAME_SIZE = PAGE_SIZE;
+ private IHyracksStageletContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
+
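+ // fixed seed so the sequence of randomly chosen files is reproducible across runs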
+ private static final Random rnd = new Random(50);
+
+ private String getFileName() {
+ String fileName = tmpDir + sep + simpleDateFormat.format(new Date()) + openedFiles.size();
+ openedFiles.add(fileName);
+ return fileName;
+ }
+
+ @Test
+ public void simpleOpenPinCloseTest() throws HyracksDataException {
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ String fileName = getFileName();
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ int num = 10;
+ int testPageId = 0;
+
+ bufferCache.openFile(fileId);
+
+ ICachedPage page = null;
+
+ // tryPin should fail because the page is not yet in the buffer cache
+ page = bufferCache.tryPin(BufferedFileHandle.getDiskPageId(fileId, testPageId));
+ Assert.assertNull(page);
+
+ // pin page should succeed
+ page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, testPageId), true);
+ page.acquireWriteLatch();
+ try {
+ for(int i = 0; i < num; i++) {
+ page.getBuffer().putInt(i * 4, i);
+ }
+
+ // tryPin should now succeed since the page is cached
+ ICachedPage page2 = bufferCache.tryPin(BufferedFileHandle.getDiskPageId(fileId, testPageId));
+ Assert.assertNotNull(page2);
+ bufferCache.unpin(page2);
+
+ } finally {
+ page.releaseWriteLatch();
+ bufferCache.unpin(page);
+ }
+
+ bufferCache.closeFile(fileId);
+
+ boolean exceptionThrown = false;
+
+ // tryPin should fail since file is not open
+ try {
+ page = bufferCache.tryPin(BufferedFileHandle.getDiskPageId(fileId, testPageId));
+ } catch(HyracksDataException e) {
+ exceptionThrown = true;
+ }
+ Assert.assertTrue(exceptionThrown);
+
+ // pin should fail since file is not open
+ exceptionThrown = false;
+ try {
+ page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, testPageId), false);
+ } catch(HyracksDataException e) {
+ exceptionThrown = true;
+ }
+ Assert.assertTrue(exceptionThrown);
+
+ // open file again
+ bufferCache.openFile(fileId);
+
+ // tryPin should succeed because the page should still be cached
+ page = bufferCache.tryPin(BufferedFileHandle.getDiskPageId(fileId, testPageId));
+ Assert.assertNotNull(page);
+ page.acquireReadLatch();
+ try {
+ // verify contents of page
+ for(int i = 0; i < num; i++) {
+ Assert.assertEquals(page.getBuffer().getInt(i * 4), i);
+ }
+ } finally {
+ page.releaseReadLatch();
+ bufferCache.unpin(page);
+ }
+
+ bufferCache.closeFile(fileId);
+ bufferCache.close();
+ }
+
+ @Test
+ public void simpleMaxOpenFilesTest() throws HyracksDataException {
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+
+ List<Integer> fileIds = new ArrayList<Integer>();
+
+ for(int i = 0; i < MAX_OPEN_FILES; i++) {
+ String fileName = getFileName();
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
+ fileIds.add(fileId);
+ }
+
+ boolean exceptionThrown = false;
+
+ // since all files are open, next open should fail
+ try {
+ String fileName = getFileName();
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
+ } catch(HyracksDataException e) {
+ exceptionThrown = true;
+ }
+ Assert.assertTrue(exceptionThrown);
+
+ // close a random file
+ int ix = Math.abs(rnd.nextInt()) % fileIds.size();
+ bufferCache.closeFile(fileIds.get(ix));
+ fileIds.remove(ix);
+
+ // now open should succeed again
+ exceptionThrown = false;
+ try {
+ String fileName = getFileName();
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
+ fileIds.add(fileId);
+
+ } catch(HyracksDataException e) {
+ exceptionThrown = true;
+ }
+ Assert.assertFalse(exceptionThrown);
+
+ for(Integer i : fileIds) {
+ bufferCache.closeFile(i.intValue());
+ }
+
+ bufferCache.close();
+ }
+
+ @Test
+ public void contentCheckingMaxOpenFilesTest() throws HyracksDataException {
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+
+ List<Integer> fileIds = new ArrayList<Integer>();
+ Map<Integer, ArrayList<Integer>> pageContents = new HashMap<Integer, ArrayList<Integer>>();
+ int num = 10;
+ int testPageId = 0;
+
+ // open the maximum number of files and write random integers into their first page
+ for(int i = 0; i < MAX_OPEN_FILES; i++) {
+ String fileName = getFileName();
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
+ fileIds.add(fileId);
+
+ ICachedPage page = null;
+ page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, testPageId), true);
+ page.acquireWriteLatch();
+ try {
+ ArrayList<Integer> values = new ArrayList<Integer>();
+ for(int j = 0; j < num; j++) {
+ int x = Math.abs(rnd.nextInt());
+ page.getBuffer().putInt(j * 4, x);
+ values.add(x);
+ }
+ pageContents.put(fileId, values);
+ } finally {
+ page.releaseWriteLatch();
+ bufferCache.unpin(page);
+ }
+ }
+
+ boolean exceptionThrown = false;
+
+ // since all files are open, next open should fail
+ try {
+ String fileName = getFileName();
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
+ } catch(HyracksDataException e) {
+ exceptionThrown = true;
+ }
+ Assert.assertTrue(exceptionThrown);
+
+ // close a few random files
+ ArrayList<Integer> closedFileIds = new ArrayList<Integer>();
+ int filesToClose = 5;
+ for(int i = 0; i < filesToClose; i++) {
+ int ix = Math.abs(rnd.nextInt()) % fileIds.size();
+ bufferCache.closeFile(fileIds.get(ix));
+ closedFileIds.add(fileIds.get(ix));
+ fileIds.remove(ix);
+ }
+
+ // now open a few new files
+ for(int i = 0; i < filesToClose; i++) {
+ String fileName = getFileName();
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
+ fileIds.add(fileId);
+ }
+
+ // since all files are open, next open should fail
+ try {
+ String fileName = getFileName();
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
+ } catch(HyracksDataException e) {
+ exceptionThrown = true;
+ }
+ Assert.assertTrue(exceptionThrown);
+
+ // close a few random files again
+ for(int i = 0; i < filesToClose; i++) {
+ int ix = Math.abs(rnd.nextInt()) % fileIds.size();
+ bufferCache.closeFile(fileIds.get(ix));
+ closedFileIds.add(fileIds.get(ix));
+ fileIds.remove(ix);
+ }
+
+ // now open those closed files again and verify their contents
+ for(int i = 0; i < filesToClose; i++) {
+ int closedFileId = closedFileIds.get(i);
+ bufferCache.openFile(closedFileId);
+ fileIds.add(closedFileId);
+
+ // pin first page and verify contents
+ ICachedPage page = null;
+ page = bufferCache.pin(BufferedFileHandle.getDiskPageId(closedFileId, testPageId), false);
+ page.acquireReadLatch();
+ try {
+ ArrayList<Integer> values = pageContents.get(closedFileId);
+ for(int j = 0; j < values.size(); j++) {
+ Assert.assertEquals(values.get(j).intValue(), page.getBuffer().getInt(j * 4));
+ }
+ } finally {
+ page.releaseReadLatch();
+ bufferCache.unpin(page);
+ }
+ }
+
+ for(Integer i : fileIds) {
+ bufferCache.closeFile(i.intValue());
+ }
+
+ bufferCache.close();
+ }
+
+ @AfterClass
+ public static void cleanup() throws Exception {
+ for(String s : openedFiles) {
+ File f = new File(s);
+ f.deleteOnExit();
+ }
+ }
+}
diff --git a/hyracks-tests/pom.xml b/hyracks-tests/pom.xml
index 4a391bc..ee1280d 100644
--- a/hyracks-tests/pom.xml
+++ b/hyracks-tests/pom.xml
@@ -12,7 +12,9 @@
</parent>
<modules>
+ <module>hyracks-storage-common-test</module>
<module>hyracks-storage-am-btree-test</module>
<module>hyracks-storage-am-invertedindex-test</module>
+ <module>hyracks-storage-am-rtree-test</module>
</modules>
</project>
diff --git a/pom.xml b/pom.xml
index b6cc875..e9ccdd0 100644
--- a/pom.xml
+++ b/pom.xml
@@ -71,8 +71,10 @@
<module>hyracks-control-nc</module>
<module>hyracks-cli</module>
<module>hyracks-storage-common</module>
+ <module>hyracks-storage-am-common</module>
<module>hyracks-storage-am-btree</module>
<module>hyracks-storage-am-invertedindex</module>
+ <module>hyracks-storage-am-rtree</module>
<module>hyracks-test-support</module>
<module>hyracks-tests</module>
<module>hyracks-server</module>