Added missing files.
git-svn-id: https://hyracks.googlecode.com/svn/branches/hyracks_lsm_tree@1187 123451ca-8445-de46-9d55-352943316053
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexBulkLoadTest.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexBulkLoadTest.java
new file mode 100644
index 0000000..85bfdd2
--- /dev/null
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexBulkLoadTest.java
@@ -0,0 +1,65 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+
+@SuppressWarnings("rawtypes")
+public abstract class OrderedIndexBulkLoadTest extends OrderedIndexTestDriver {
+
+ private final OrderedIndexTestUtils orderedIndexTestUtils;
+ private final int bulkLoadRounds;
+
+ public OrderedIndexBulkLoadTest(BTreeLeafFrameType[] leafFrameTypesToTest, int bulkLoadRounds) {
+ super(leafFrameTypesToTest);
+ this.bulkLoadRounds = bulkLoadRounds;
+ this.orderedIndexTestUtils = new OrderedIndexTestUtils();
+ }
+
+ @Override
+ protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType,
+ ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey)
+ throws Exception {
+ OrderedIndexTestContext ctx = createTestContext(fieldSerdes, numKeys, leafType);
+ for (int i = 0; i < bulkLoadRounds; i++) {
+ // We assume all fieldSerdes are of the same type. Check the first
+ // one to determine which field types to generate.
+ if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
+ orderedIndexTestUtils.bulkLoadIntTuples(ctx, numTuplesToInsert, getRandom());
+ } else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
+ orderedIndexTestUtils.bulkLoadStringTuples(ctx, numTuplesToInsert, getRandom());
+ }
+ orderedIndexTestUtils.checkPointSearches(ctx);
+ orderedIndexTestUtils.checkScan(ctx);
+ orderedIndexTestUtils.checkDiskOrderScan(ctx);
+ orderedIndexTestUtils.checkRangeSearch(ctx, lowKey, highKey, true, true);
+ if (prefixLowKey != null && prefixHighKey != null) {
+ orderedIndexTestUtils.checkRangeSearch(ctx, prefixLowKey, prefixHighKey, true, true);
+ }
+ }
+ ctx.getIndex().close();
+ }
+
+ @Override
+ protected String getTestOpName() {
+ return "BulkLoad";
+ }
+}
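For orientation, the following is a minimal sketch (not part of this patch) of how a concrete BTree test could extend the driver added above. The BTreeTestHarness and BTreeTestContext helpers are assumed names used purely for illustration; only the constructor and the overridden methods mirror OrderedIndexBulkLoadTest and OrderedIndexTestDriver as defined in this change.

package edu.uci.ics.hyracks.storage.am.btree;

import java.util.Random;

import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;

@SuppressWarnings("rawtypes")
public class ExampleBTreeBulkLoadTest extends OrderedIndexBulkLoadTest {

    // Assumed helper that owns the buffer cache, file id, and RNG for the test.
    private final BTreeTestHarness harness = new BTreeTestHarness();

    public ExampleBTreeBulkLoadTest() {
        // Exercise both leaf frame types with a single bulk-load round.
        super(new BTreeLeafFrameType[] { BTreeLeafFrameType.REGULAR_NSM,
                BTreeLeafFrameType.FIELD_PREFIX_COMPRESSED_NSM }, 1);
    }

    @Override
    protected OrderedIndexTestContext createTestContext(ISerializerDeserializer[] fieldSerdes, int numKeys,
            BTreeLeafFrameType leafType) throws Exception {
        // Assumed factory that binds a freshly created BTree to the shared test context.
        return BTreeTestContext.create(harness, fieldSerdes, numKeys, leafType);
    }

    @Override
    protected Random getRandom() {
        return harness.getRandom();
    }
}

A JUnit runner would then execute the @Test methods inherited from OrderedIndexTestDriver (oneIntKeyAndValue, twoIntKeys, and so on) against that context.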
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexDeleteTest.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexDeleteTest.java
new file mode 100644
index 0000000..93075a1
--- /dev/null
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexDeleteTest.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+
+@SuppressWarnings("rawtypes")
+public abstract class OrderedIndexDeleteTest extends OrderedIndexTestDriver {
+
+ private final OrderedIndexTestUtils orderedIndexTestUtils;
+
+ public OrderedIndexDeleteTest(BTreeLeafFrameType[] leafFrameTypesToTest) {
+ super(leafFrameTypesToTest);
+ this.orderedIndexTestUtils = new OrderedIndexTestUtils();
+ }
+
+ private static final int numInsertRounds = 3;
+ private static final int numDeleteRounds = 3;
+
+ @Override
+ protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType,
+ ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey)
+ throws Exception {
+ OrderedIndexTestContext ctx = createTestContext(fieldSerdes, numKeys, leafType);
+ for (int i = 0; i < numInsertRounds; i++) {
+ // We assume all fieldSerdes are of the same type. Check the first
+ // one to determine which field types to generate.
+ if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
+ orderedIndexTestUtils.insertIntTuples(ctx, numTuplesToInsert, getRandom());
+ } else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
+ orderedIndexTestUtils.insertStringTuples(ctx, numTuplesToInsert, getRandom());
+ }
+ int numTuplesPerDeleteRound = (int) Math
+ .ceil((float) ctx.getCheckTuples().size() / (float) numDeleteRounds);
+ for (int j = 0; j < numDeleteRounds; j++) {
+ orderedIndexTestUtils.deleteTuples(ctx, numTuplesPerDeleteRound, getRandom());
+ orderedIndexTestUtils.checkPointSearches(ctx);
+ orderedIndexTestUtils.checkScan(ctx);
+ orderedIndexTestUtils.checkDiskOrderScan(ctx);
+ orderedIndexTestUtils.checkRangeSearch(ctx, lowKey, highKey, true, true);
+ if (prefixLowKey != null && prefixHighKey != null) {
+ orderedIndexTestUtils.checkRangeSearch(ctx, prefixLowKey, prefixHighKey, true, true);
+ }
+ }
+ }
+ ctx.getIndex().close();
+ }
+
+ @Override
+ protected String getTestOpName() {
+ return "Delete";
+ }
+}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexExamplesTest.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexExamplesTest.java
new file mode 100644
index 0000000..da9d548
--- /dev/null
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexExamplesTest.java
@@ -0,0 +1,635 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.util.Random;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+
+@SuppressWarnings("rawtypes")
+public abstract class OrderedIndexExamplesTest {
+ protected static final Logger LOGGER = Logger.getLogger(OrderedIndexExamplesTest.class.getName());
+ protected final Random rnd = new Random(50);
+
+ protected abstract ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories)
+ throws TreeIndexException;
+
+ protected abstract int getIndexFileId();
+
+ /**
+ * Fixed-Length Key,Value Example.
+ *
+ * Create a tree index with one fixed-length key field and one fixed-length value
+ * field. Fill index with random values using insertions (not bulk load).
+ * Perform scans and range search.
+ */
+ @Test
+ public void fixedLengthKeyValueExample() throws Exception {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Fixed-Length Key,Value Example.");
+ }
+
+ // Declare fields.
+ int fieldCount = 2;
+ ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+ typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+ typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+ // Declare field serdes.
+ ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE };
+
+ // Declare keys.
+ int keyFieldCount = 1;
+ IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+ cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+ int indexFileId = getIndexFileId();
+ ITreeIndex treeIndex = createTreeIndex(typeTraits, cmpFactories);
+ treeIndex.create(indexFileId);
+ treeIndex.open(indexFileId);
+
+ long start = System.currentTimeMillis();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Inserting into tree...");
+ }
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+ ArrayTupleReference tuple = new ArrayTupleReference();
+ ITreeIndexAccessor indexAccessor = treeIndex.createAccessor();
+ int numInserts = 10000;
+ for (int i = 0; i < numInserts; i++) {
+ int f0 = rnd.nextInt() % numInserts;
+ int f1 = 5;
+ TupleUtils.createIntegerTuple(tb, tuple, f0, f1);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ if (i % 1000 == 0) {
+ LOGGER.info("Inserting " + i + " : " + f0 + " " + f1);
+ }
+ }
+ try {
+ indexAccessor.insert(tuple);
+ } catch (TreeIndexException e) {
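+ // Ignore exceptions on duplicate keys; collisions are expected with random data.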
+ }
+ }
+ long end = System.currentTimeMillis();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
+ }
+
+ orderedScan(indexAccessor, fieldSerdes);
+ diskOrderScan(indexAccessor, fieldSerdes);
+
+ // Build low key.
+ ArrayTupleBuilder lowKeyTb = new ArrayTupleBuilder(keyFieldCount);
+ ArrayTupleReference lowKey = new ArrayTupleReference();
+ TupleUtils.createIntegerTuple(lowKeyTb, lowKey, -1000);
+
+ // Build high key.
+ ArrayTupleBuilder highKeyTb = new ArrayTupleBuilder(keyFieldCount);
+ ArrayTupleReference highKey = new ArrayTupleReference();
+ TupleUtils.createIntegerTuple(highKeyTb, highKey, 1000);
+
+ rangeSearch(cmpFactories, indexAccessor, fieldSerdes, lowKey, highKey);
+
+ treeIndex.close();
+ }
+
+ /**
+ * Composite Key Example (Non-Unique Index).
+ *
+ * Create a tree index with two fixed-length key fields and one fixed-length
+ * value field. Fill index with random values using insertions (not bulk
+ * load). Perform scans and range search.
+ */
+ @Test
+ public void twoFixedLengthKeysOneFixedLengthValueExample() throws Exception {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Composite Key Test");
+ }
+
+ // Declare fields.
+ int fieldCount = 3;
+ ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+ typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+ typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+ typeTraits[2] = IntegerPointable.TYPE_TRAITS;
+ // Declare field serdes.
+ ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+
+ // declare keys
+ int keyFieldCount = 2;
+ IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+ cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+ cmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+ int indexFileId = getIndexFileId();
+ ITreeIndex treeIndex = createTreeIndex(typeTraits, cmpFactories);
+ treeIndex.create(indexFileId);
+ treeIndex.open(indexFileId);
+
+ long start = System.currentTimeMillis();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Inserting into tree...");
+ }
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+ ArrayTupleReference tuple = new ArrayTupleReference();
+ ITreeIndexAccessor indexAccessor = treeIndex.createAccessor();
+ int numInserts = 10000;
+ for (int i = 0; i < numInserts; i++) {
+ int f0 = rnd.nextInt() % 2000;
+ int f1 = rnd.nextInt() % 1000;
+ int f2 = 5;
+ TupleUtils.createIntegerTuple(tb, tuple, f0, f1, f2);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ if (i % 1000 == 0) {
+ LOGGER.info("Inserting " + i + " : " + f0 + " " + f1 + " " + f2);
+ }
+ }
+ try {
+ indexAccessor.insert(tuple);
+ } catch (TreeIndexException e) {
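+ // Ignore exceptions on duplicate keys; collisions are expected with random data.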
+ }
+ }
+ long end = System.currentTimeMillis();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
+ }
+
+ orderedScan(indexAccessor, fieldSerdes);
+ diskOrderScan(indexAccessor, fieldSerdes);
+
+ // Build low key.
+ ArrayTupleBuilder lowKeyTb = new ArrayTupleBuilder(1);
+ ArrayTupleReference lowKey = new ArrayTupleReference();
+ TupleUtils.createIntegerTuple(lowKeyTb, lowKey, -3);
+
+ // Build high key.
+ ArrayTupleBuilder highKeyTb = new ArrayTupleBuilder(1);
+ ArrayTupleReference highKey = new ArrayTupleReference();
+ TupleUtils.createIntegerTuple(highKeyTb, highKey, 3);
+
+ // Prefix-Range search in [-3, 3]
+ rangeSearch(cmpFactories, indexAccessor, fieldSerdes, lowKey, highKey);
+
+ treeIndex.close();
+ }
+
+ /**
+ * Variable-Length Example. Create a BTree with one variable-length key
+ * field and one variable-length value field. Fill BTree with random values
+ * using insertions (not bulk load). Perform ordered scans and range search.
+ */
+ @Test
+ public void varLenKeyValueExample() throws Exception {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Variable-Length Key,Value Example");
+ }
+
+ // Declare fields.
+ int fieldCount = 2;
+ ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+ typeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
+ typeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
+ // Declare field serdes.
+ ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE };
+
+ // Declare keys.
+ int keyFieldCount = 1;
+ IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+ cmpFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
+
+ int indexFileId = getIndexFileId();
+ ITreeIndex treeIndex = createTreeIndex(typeTraits, cmpFactories);
+ treeIndex.create(indexFileId);
+ treeIndex.open(indexFileId);
+
+ long start = System.currentTimeMillis();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Inserting into tree...");
+ }
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+ ArrayTupleReference tuple = new ArrayTupleReference();
+ ITreeIndexAccessor indexAccessor = treeIndex.createAccessor();
+ // Max string length to be generated.
+ int maxLength = 10;
+ int numInserts = 10000;
+ for (int i = 0; i < numInserts; i++) {
+ String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+ String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+ TupleUtils.createTuple(tb, tuple, fieldSerdes, f0, f1);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ if (i % 1000 == 0) {
+ LOGGER.info("Inserting " + f0 + " " + f1);
+ }
+ }
+ try {
+ indexAccessor.insert(tuple);
+ } catch (TreeIndexException e) {
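+ // Ignore exceptions on duplicate keys; collisions are expected with random data.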
+ }
+ }
+ long end = System.currentTimeMillis();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
+ }
+
+ orderedScan(indexAccessor, fieldSerdes);
+ diskOrderScan(indexAccessor, fieldSerdes);
+
+ // Build low key.
+ ArrayTupleBuilder lowKeyTb = new ArrayTupleBuilder(1);
+ ArrayTupleReference lowKey = new ArrayTupleReference();
+ TupleUtils.createTuple(lowKeyTb, lowKey, fieldSerdes, "cbf");
+
+ // Build high key.
+ ArrayTupleBuilder highKeyTb = new ArrayTupleBuilder(1);
+ ArrayTupleReference highKey = new ArrayTupleReference();
+ TupleUtils.createTuple(highKeyTb, highKey, fieldSerdes, "cc7");
+
+ rangeSearch(cmpFactories, indexAccessor, fieldSerdes, lowKey, highKey);
+
+ treeIndex.close();
+ }
+
+ /**
+ * Deletion Example.
+ *
+ * Create a BTree with one variable-length key field and one variable-length
+ * value field. Fill the BTree with random values using insertions, then delete
+ * entries one-by-one. Repeat the procedure a few times on the same BTree.
+ */
+ @Test
+ public void deleteExample() throws Exception {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Deletion Example");
+ }
+
+ // Declare fields.
+ int fieldCount = 2;
+ ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+ typeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
+ typeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
+ // Declare field serdes.
+ ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE };
+
+ // Declare keys.
+ int keyFieldCount = 1;
+ IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+ cmpFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
+
+ int indexFileId = getIndexFileId();
+ ITreeIndex treeIndex = createTreeIndex(typeTraits, cmpFactories);
+ treeIndex.create(indexFileId);
+ treeIndex.open(indexFileId);
+
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+ ArrayTupleReference tuple = new ArrayTupleReference();
+ ITreeIndexAccessor indexAccessor = treeIndex.createAccessor();
+ int runs = 3;
+ for (int run = 0; run < runs; run++) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Deletion example run: " + (run + 1) + "/" + runs);
+ LOGGER.info("Inserting into tree...");
+ }
+ // Max string length to be generated.
+ int maxLength = 10;
+ int ins = 10000;
+ String[] f0s = new String[ins];
+ String[] f1s = new String[ins];
+ int insDone = 0;
+ int[] insDoneCmp = new int[ins];
+ for (int i = 0; i < ins; i++) {
+ String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+ String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+ TupleUtils.createTuple(tb, tuple, fieldSerdes, f0, f1);
+ f0s[i] = f0;
+ f1s[i] = f1;
+ if (LOGGER.isLoggable(Level.INFO)) {
+ if (i % 1000 == 0) {
+ LOGGER.info("Inserting " + i);
+ }
+ }
+ try {
+ indexAccessor.insert(tuple);
+ insDone++;
+ } catch (TreeIndexException e) {
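+ // Ignore exceptions on duplicate keys; collisions are expected with random data.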
+ }
+ insDoneCmp[i] = insDone;
+ }
+
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Deleting from tree...");
+ }
+ int delDone = 0;
+ for (int i = 0; i < ins; i++) {
+ TupleUtils.createTuple(tb, tuple, fieldSerdes, f0s[i], f1s[i]);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ if (i % 1000 == 0) {
+ LOGGER.info("Deleting " + i);
+ }
+ }
+ try {
+ indexAccessor.delete(tuple);
+ delDone++;
+ } catch (TreeIndexException e) {
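+ // Ignore keys that were never inserted because they were duplicates.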
+ }
+ if (insDoneCmp[i] != delDone) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("INCONSISTENT STATE, ERROR IN DELETION EXAMPLE.");
+ LOGGER.info("INSDONECMP: " + insDoneCmp[i] + " " + delDone);
+ }
+ break;
+ }
+ }
+ if (insDone != delDone) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("ERROR! INSDONE: " + insDone + " DELDONE: " + delDone);
+ }
+ break;
+ }
+ }
+ treeIndex.close();
+ }
+
+ /**
+ * Update example.
+ *
+ * Create a BTree with one variable-length key field and one variable-length
+ * value field. Fill the BTree with random values using insertions, then update
+ * entries one-by-one. Repeat the procedure a few times on the same BTree.
+ */
+ @Test
+ public void updateExample() throws Exception {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Update example");
+ }
+
+ // Declare fields.
+ int fieldCount = 2;
+ ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+ typeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
+ typeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
+ // Declare field serdes.
+ ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE };
+
+ // Declare keys.
+ int keyFieldCount = 1;
+ IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+ cmpFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
+
+ int indexFileId = getIndexFileId();
+ ITreeIndex treeIndex = createTreeIndex(typeTraits, cmpFactories);
+ treeIndex.create(indexFileId);
+ treeIndex.open(indexFileId);
+
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Inserting into tree...");
+ }
+ ITreeIndexAccessor indexAccessor = treeIndex.createAccessor();
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+ ArrayTupleReference tuple = new ArrayTupleReference();
+ int maxLength = 10;
+ int ins = 10000;
+ String[] keys = new String[ins];
+ for (int i = 0; i < ins; i++) {
+ String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+ String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+ TupleUtils.createTuple(tb, tuple, fieldSerdes, f0, f1);
+ keys[i] = f0;
+ if (LOGGER.isLoggable(Level.INFO)) {
+ if (i % 1000 == 0) {
+ LOGGER.info("Inserting " + i);
+ }
+ }
+ try {
+ indexAccessor.insert(tuple);
+ } catch (TreeIndexException e) {
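+ // Ignore exceptions on duplicate keys; collisions are expected with random data.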
+ }
+ }
+ // Print before doing any updates.
+ orderedScan(indexAccessor, fieldSerdes);
+
+ int runs = 3;
+ for (int run = 0; run < runs; run++) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Update test run: " + (run + 1) + "/" + runs);
+ LOGGER.info("Updating BTree");
+ }
+ for (int i = 0; i < ins; i++) {
+ // Generate a new random value for f1.
+ String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+ TupleUtils.createTuple(tb, tuple, fieldSerdes, keys[i], f1);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ if (i % 1000 == 0) {
+ LOGGER.info("Updating " + i);
+ }
+ }
+ try {
+ indexAccessor.update(tuple);
+ } catch (TreeIndexException e) {
+ } catch (UnsupportedOperationException e) {
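+ // Some indexes may not support update; ignore in that case.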
+ }
+ }
+ // Do another scan after a round of updates.
+ orderedScan(indexAccessor, fieldSerdes);
+ }
+ treeIndex.close();
+ }
+
+ /**
+ * Bulk load example.
+ *
+ * Load a tree with 100,000 tuples. The BTree has a composite key to "simulate"
+ * non-unique index creation.
+ *
+ */
+ @Test
+ public void bulkLoadExample() throws Exception {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Bulk load example");
+ }
+ // Declare fields.
+ int fieldCount = 3;
+ ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+ typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+ typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+ typeTraits[2] = IntegerPointable.TYPE_TRAITS;
+ // Declare field serdes.
+ ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+
+ // declare keys
+ int keyFieldCount = 2;
+ IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+ cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+ cmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+ int indexFileId = getIndexFileId();
+ ITreeIndex treeIndex = createTreeIndex(typeTraits, cmpFactories);
+ treeIndex.create(indexFileId);
+ treeIndex.open(indexFileId);
+
+ // Load sorted records.
+ int ins = 100000;
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Bulk loading " + ins + " tuples");
+ }
+ long start = System.currentTimeMillis();
+ IIndexBulkLoadContext bulkLoadCtx = treeIndex.beginBulkLoad(0.7f);
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+ ArrayTupleReference tuple = new ArrayTupleReference();
+ for (int i = 0; i < ins; i++) {
+ TupleUtils.createIntegerTuple(tb, tuple, i, i, 5);
+ treeIndex.bulkLoadAddTuple(tuple, bulkLoadCtx);
+ }
+ treeIndex.endBulkLoad(bulkLoadCtx);
+ long end = System.currentTimeMillis();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info(ins + " tuples loaded in " + (end - start) + "ms");
+ }
+
+ ITreeIndexAccessor indexAccessor = treeIndex.createAccessor();
+
+ // Build low key.
+ ArrayTupleBuilder lowKeyTb = new ArrayTupleBuilder(1);
+ ArrayTupleReference lowKey = new ArrayTupleReference();
+ TupleUtils.createIntegerTuple(lowKeyTb, lowKey, 44444);
+
+ // Build high key.
+ ArrayTupleBuilder highKeyTb = new ArrayTupleBuilder(1);
+ ArrayTupleReference highKey = new ArrayTupleReference();
+ TupleUtils.createIntegerTuple(highKeyTb, highKey, 44500);
+
+ // Prefix-Range search in [44444, 44500]
+ rangeSearch(cmpFactories, indexAccessor, fieldSerdes, lowKey, highKey);
+
+ treeIndex.close();
+ }
+
+ private void orderedScan(ITreeIndexAccessor indexAccessor, ISerializerDeserializer[] fieldSerdes)
+ throws Exception {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Ordered Scan:");
+ }
+ ITreeIndexCursor scanCursor = indexAccessor.createSearchCursor();
+ RangePredicate nullPred = new RangePredicate(null, null, true, true, null, null);
+ indexAccessor.search(scanCursor, nullPred);
+ try {
+ while (scanCursor.hasNext()) {
+ scanCursor.next();
+ ITupleReference frameTuple = scanCursor.getTuple();
+ String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info(rec);
+ }
+ }
+ } finally {
+ scanCursor.close();
+ }
+ }
+
+ private void diskOrderScan(ITreeIndexAccessor indexAccessor,
+ ISerializerDeserializer[] fieldSerdes) throws Exception {
+ try {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Disk-Order Scan:");
+ }
+ TreeDiskOrderScanCursor diskOrderCursor = (TreeDiskOrderScanCursor) indexAccessor
+ .createDiskOrderScanCursor();
+ indexAccessor.diskOrderScan(diskOrderCursor);
+ try {
+ while (diskOrderCursor.hasNext()) {
+ diskOrderCursor.next();
+ ITupleReference frameTuple = diskOrderCursor.getTuple();
+ String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info(rec);
+ }
+ }
+ } finally {
+ diskOrderCursor.close();
+ }
+ } catch (UnsupportedOperationException e) {
+ // Ignore exception because some indexes, e.g. the LSMBTree, don't
+ // support disk-order scan.
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Ignoring disk-order scan since it's not supported.");
+ }
+ }
+ }
+
+ private void rangeSearch(IBinaryComparatorFactory[] cmpFactories, ITreeIndexAccessor indexAccessor, ISerializerDeserializer[] fieldSerdes,
+ ITupleReference lowKey, ITupleReference highKey) throws Exception {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ String lowKeyString = TupleUtils.printTuple(lowKey, fieldSerdes);
+ String highKeyString = TupleUtils.printTuple(highKey, fieldSerdes);
+ LOGGER.info("Range-Search in: [ " + lowKeyString + ", " + highKeyString + "]");
+ }
+ ITreeIndexCursor rangeCursor = indexAccessor.createSearchCursor();
+ MultiComparator lowKeySearchCmp = BTreeUtils.getSearchMultiComparator(cmpFactories, lowKey);
+ MultiComparator highKeySearchCmp = BTreeUtils.getSearchMultiComparator(cmpFactories, highKey);
+ RangePredicate rangePred = new RangePredicate(lowKey, highKey, true, true, lowKeySearchCmp,
+ highKeySearchCmp);
+ indexAccessor.search(rangeCursor, rangePred);
+ try {
+ while (rangeCursor.hasNext()) {
+ rangeCursor.next();
+ ITupleReference frameTuple = rangeCursor.getTuple();
+ String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info(rec);
+ }
+ }
+ } finally {
+ rangeCursor.close();
+ }
+ }
+
+ public static String randomString(int length, Random random) {
+ String s = Long.toHexString(Double.doubleToLongBits(random.nextDouble()));
+ StringBuilder strBuilder = new StringBuilder();
+ for (int i = 0; i < s.length() && i < length; i++) {
+ strBuilder.append(s.charAt(Math.abs(random.nextInt()) % s.length()));
+ }
+ return strBuilder.toString();
+ }
+}
\ No newline at end of file
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexInsertTest.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexInsertTest.java
new file mode 100644
index 0000000..d12603b
--- /dev/null
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexInsertTest.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+
+/**
+ * Tests the BTree insert operation with strings and integer fields using
+ * various numbers of key and payload fields.
+ *
+ * Each test first fills a BTree with randomly generated tuples. We compare the
+ * following operations against expected results: 1. Point searches for all
+ * tuples. 2. Ordered scan. 3. Disk-order scan. 4. Range search (and prefix
+ * search for composite keys).
+ *
+ */
+@SuppressWarnings("rawtypes")
+public abstract class OrderedIndexInsertTest extends OrderedIndexTestDriver {
+
+ private final OrderedIndexTestUtils orderedIndexTestUtils;
+
+ public OrderedIndexInsertTest(BTreeLeafFrameType[] leafFrameTypesToTest) {
+ super(leafFrameTypesToTest);
+ this.orderedIndexTestUtils = new OrderedIndexTestUtils();
+ }
+
+ @Override
+ protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType,
+ ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey)
+ throws Exception {
+ OrderedIndexTestContext ctx = createTestContext(fieldSerdes, numKeys, leafType);
+ // We assume all fieldSerdes are of the same type. Check the first one
+ // to determine which field types to generate.
+ if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
+ orderedIndexTestUtils.insertIntTuples(ctx, numTuplesToInsert, getRandom());
+ } else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
+ orderedIndexTestUtils.insertStringTuples(ctx, numTuplesToInsert, getRandom());
+ }
+
+ orderedIndexTestUtils.checkPointSearches(ctx);
+ orderedIndexTestUtils.checkScan(ctx);
+ orderedIndexTestUtils.checkDiskOrderScan(ctx);
+
+ orderedIndexTestUtils.checkRangeSearch(ctx, lowKey, highKey, true, true);
+ if (prefixLowKey != null && prefixHighKey != null) {
+ orderedIndexTestUtils.checkRangeSearch(ctx, prefixLowKey, prefixHighKey, true, true);
+ }
+ ctx.getIndex().close();
+ }
+
+ @Override
+ protected String getTestOpName() {
+ return "Insert";
+ }
+}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexMultiThreadTest.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexMultiThreadTest.java
new file mode 100644
index 0000000..3a894a2
--- /dev/null
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexMultiThreadTest.java
@@ -0,0 +1,126 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.util.ArrayList;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.SerdeUtils;
+import edu.uci.ics.hyracks.storage.am.common.ITreeIndexTestWorkerFactory;
+import edu.uci.ics.hyracks.storage.am.common.TestWorkloadConf;
+import edu.uci.ics.hyracks.storage.am.common.TreeIndexMultiThreadTestDriver;
+import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+
+@SuppressWarnings("rawtypes")
+public abstract class OrderedIndexMultiThreadTest {
+
+ protected final Logger LOGGER = Logger.getLogger(OrderedIndexMultiThreadTest.class.getName());
+
+ // Machine-specific number of threads to use for testing.
+ protected final int REGULAR_NUM_THREADS = Runtime.getRuntime().availableProcessors();
+ // Excessive number of threads for testing.
+ protected final int EXCESSIVE_NUM_THREADS = Runtime.getRuntime().availableProcessors() * 4;
+ protected final int NUM_OPERATIONS = 10000;
+
+ protected ArrayList<TestWorkloadConf> workloadConfs = getTestWorkloadConf();
+
+ protected abstract void setUp() throws HyracksException;
+
+ protected abstract void tearDown() throws HyracksDataException;
+
+ protected abstract ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories) throws TreeIndexException;
+
+ protected abstract int getFileId();
+
+ protected abstract ITreeIndexTestWorkerFactory getWorkerFactory();
+
+ protected abstract ArrayList<TestWorkloadConf> getTestWorkloadConf();
+
+ protected abstract String getIndexTypeName();
+
+ protected static float[] getUniformOpProbs(TestOperation[] ops) {
+ float[] opProbs = new float[ops.length];
+ for (int i = 0; i < ops.length; i++) {
+ opProbs[i] = 1.0f / (float) ops.length;
+ }
+ return opProbs;
+ }
+
+ protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, int numThreads, TestWorkloadConf conf, String dataMsg) throws InterruptedException, TreeIndexException, HyracksException {
+ setUp();
+
+ if (LOGGER.isLoggable(Level.INFO)) {
+ String indexTypeName = getIndexTypeName();
+ LOGGER.info(indexTypeName + " MultiThread Test:\nData: " + dataMsg + "; Threads: " + numThreads + "; Workload: " + conf.toString() + ".");
+ }
+
+ ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes);
+ IBinaryComparatorFactory[] cmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes, numKeys);
+
+ ITreeIndex index = createTreeIndex(typeTraits, cmpFactories);
+ ITreeIndexTestWorkerFactory workerFactory = getWorkerFactory();
+
+ // 4 batches per thread.
+ int batchSize = (NUM_OPERATIONS / numThreads) / 4;
+
+ TreeIndexMultiThreadTestDriver driver = new TreeIndexMultiThreadTestDriver(index, workerFactory, fieldSerdes, conf.ops, conf.opProbs);
+ driver.init(getFileId());
+ long[] times = driver.run(numThreads, 1, NUM_OPERATIONS, batchSize);
+ driver.deinit();
+
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("BTree MultiThread Test Time: " + times[0] + "ms");
+ }
+
+ tearDown();
+ }
+
+ @Test
+ public void oneIntKeyAndValue() throws InterruptedException, TreeIndexException, HyracksException {
+ ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+ int numKeys = 1;
+ String dataMsg = "One Int Key And Value";
+
+ for (TestWorkloadConf conf : workloadConfs) {
+ runTest(fieldSerdes, numKeys, REGULAR_NUM_THREADS, conf, dataMsg);
+ runTest(fieldSerdes, numKeys, EXCESSIVE_NUM_THREADS, conf, dataMsg);
+ }
+ }
+
+ @Test
+ public void oneStringKeyAndValue() throws InterruptedException, TreeIndexException, HyracksException {
+ ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] { UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE };
+ int numKeys = 1;
+ String dataMsg = "One String Key And Value";
+
+ for (TestWorkloadConf conf : workloadConfs) {
+ runTest(fieldSerdes, numKeys, REGULAR_NUM_THREADS, conf, dataMsg);
+ runTest(fieldSerdes, numKeys, EXCESSIVE_NUM_THREADS, conf, dataMsg);
+ }
+ }
+}
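To illustrate how getUniformOpProbs and the abstract getTestWorkloadConf are meant to combine, a hedged sketch of a subclass implementation follows (not part of this patch). The TestWorkloadConf(ops, opProbs) constructor shape is inferred from runTest's use of conf.ops and conf.opProbs, the TestOperation constants named here are assumptions, and the required imports (ArrayList, TestWorkloadConf, TestOperation) already appear in the file above.

// Inside a concrete subclass of OrderedIndexMultiThreadTest:
@Override
protected ArrayList<TestWorkloadConf> getTestWorkloadConf() {
    ArrayList<TestWorkloadConf> workloadConfs = new ArrayList<TestWorkloadConf>();

    // Inserts only.
    TestOperation[] insertOnlyOps = new TestOperation[] { TestOperation.INSERT };
    workloadConfs.add(new TestWorkloadConf(insertOnlyOps, getUniformOpProbs(insertOnlyOps)));

    // Inserts and deletes, each picked with probability 0.5 by getUniformOpProbs.
    TestOperation[] insertDeleteOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE };
    workloadConfs.add(new TestWorkloadConf(insertDeleteOps, getUniformOpProbs(insertDeleteOps)));

    return workloadConfs;
}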
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexTestContext.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexTestContext.java
new file mode 100644
index 0000000..ef23079
--- /dev/null
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexTestContext.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.util.TreeSet;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.common.CheckTuple;
+import edu.uci.ics.hyracks.storage.am.common.TreeIndexTestContext;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+
+@SuppressWarnings("rawtypes")
+public abstract class OrderedIndexTestContext extends TreeIndexTestContext<CheckTuple> {
+
+ protected final TreeSet<CheckTuple> checkTuples = new TreeSet<CheckTuple>();
+
+ public OrderedIndexTestContext(ISerializerDeserializer[] fieldSerdes, ITreeIndex treeIndex) {
+ super(fieldSerdes, treeIndex);
+ }
+
+ @Override
+ public TreeSet<CheckTuple> getCheckTuples() {
+ return checkTuples;
+ }
+
+}
\ No newline at end of file
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexTestDriver.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexTestDriver.java
new file mode 100644
index 0000000..8daa5e0
--- /dev/null
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexTestDriver.java
@@ -0,0 +1,178 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.util.Random;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+
+@SuppressWarnings("rawtypes")
+public abstract class OrderedIndexTestDriver {
+ protected final Logger LOGGER = Logger.getLogger(OrderedIndexTestDriver.class.getName());
+
+ protected static final int numTuplesToInsert = 10000;
+
+ protected abstract OrderedIndexTestContext createTestContext(ISerializerDeserializer[] fieldSerdes, int numKeys,
+ BTreeLeafFrameType leafType) throws Exception;
+
+ protected abstract Random getRandom();
+
+ protected abstract void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType,
+ ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey)
+ throws Exception;
+
+ protected abstract String getTestOpName();
+
+ protected final BTreeLeafFrameType[] leafFrameTypesToTest;
+
+ public OrderedIndexTestDriver(BTreeLeafFrameType[] leafFrameTypesToTest) {
+ this.leafFrameTypesToTest = leafFrameTypesToTest;
+ }
+
+ @Test
+ public void oneIntKeyAndValue() throws Exception {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("BTree " + getTestOpName() + " Test With One Int Key And Value.");
+ }
+
+ ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE };
+ // Range search in [-1000, 1000]
+ ITupleReference lowKey = TupleUtils.createIntegerTuple(-1000);
+ ITupleReference highKey = TupleUtils.createIntegerTuple(1000);
+
+ for (BTreeLeafFrameType leafFrameType : leafFrameTypesToTest) {
+ runTest(fieldSerdes, 1, leafFrameType, lowKey, highKey, null, null);
+ }
+ }
+
+ @Test
+ public void twoIntKeys() throws Exception {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("BTree " + getTestOpName() + " Test With Two Int Keys.");
+ }
+
+ ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE };
+
+ // Range search in [50 0, 50 500]
+ ITupleReference lowKey = TupleUtils.createIntegerTuple(50, 0);
+ ITupleReference highKey = TupleUtils.createIntegerTuple(50, 500);
+
+ // Prefix range search in [50, 50]
+ ITupleReference prefixLowKey = TupleUtils.createIntegerTuple(50);
+ ITupleReference prefixHighKey = TupleUtils.createIntegerTuple(50);
+
+ for (BTreeLeafFrameType leafFrameType : leafFrameTypesToTest) {
+ runTest(fieldSerdes, 2, leafFrameType, lowKey, highKey, prefixLowKey, prefixHighKey);
+ }
+ }
+
+ @Test
+ public void twoIntKeysAndValues() throws Exception {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("BTree " + getTestOpName() + " Test With Two Int Keys And Values.");
+ }
+
+ ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE };
+
+ // Range search in [-100 -100, 100 100]
+ ITupleReference lowKey = TupleUtils.createIntegerTuple(-100, -100);
+ ITupleReference highKey = TupleUtils.createIntegerTuple(100, 100);
+
+ // Prefix range search in [50, 50]
+ ITupleReference prefixLowKey = TupleUtils.createIntegerTuple(50);
+ ITupleReference prefixHighKey = TupleUtils.createIntegerTuple(50);
+
+ for (BTreeLeafFrameType leafFrameType : leafFrameTypesToTest) {
+ runTest(fieldSerdes, 2, leafFrameType, lowKey, highKey, prefixLowKey, prefixHighKey);
+ }
+ }
+
+ @Test
+ public void oneStringKeyAndValue() throws Exception {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("BTree " + getTestOpName() + " Test With One String Key And Value.");
+ }
+
+ ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE };
+
+ // Range search in ["cbf", "cc7"]
+ ITupleReference lowKey = TupleUtils.createTuple(fieldSerdes, "cbf");
+ ITupleReference highKey = TupleUtils.createTuple(fieldSerdes, "cc7");
+
+ for (BTreeLeafFrameType leafFrameType : leafFrameTypesToTest) {
+ runTest(fieldSerdes, 1, leafFrameType, lowKey, highKey, null, null);
+ }
+ }
+
+ @Test
+ public void twoStringKeys() throws Exception {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("BTree " + getTestOpName() + " Test With Two String Keys.");
+ }
+
+ ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE };
+
+ // Range search in ["cbf" "ddd", "cc7" "eee"]
+ ITupleReference lowKey = TupleUtils.createTuple(fieldSerdes, "cbf", "ddd");
+ ITupleReference highKey = TupleUtils.createTuple(fieldSerdes, "cc7", "eee");
+
+ // Prefix range search in ["cbf", "cc7"]
+ ITupleReference prefixLowKey = TupleUtils.createTuple(fieldSerdes, "cbf");
+ ITupleReference prefixHighKey = TupleUtils.createTuple(fieldSerdes, "cc7");
+
+ for (BTreeLeafFrameType leafFrameType : leafFrameTypesToTest) {
+ runTest(fieldSerdes, 2, leafFrameType, lowKey, highKey, prefixLowKey, prefixHighKey);
+ }
+ }
+
+ @Test
+ public void twoStringKeysAndValues() throws Exception {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("BTree " + getTestOpName() + " Test With Two String Keys And Values.");
+ }
+
+ ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE };
+
+ // Range search in ["cbf" "ddd", "cc7" "eee"]
+ ITupleReference lowKey = TupleUtils.createTuple(fieldSerdes, "cbf", "ddd");
+ ITupleReference highKey = TupleUtils.createTuple(fieldSerdes, "cc7", "eee");
+
+ // Prefix range search in ["cbf", "cc7"]
+ ITupleReference prefixLowKey = TupleUtils.createTuple(fieldSerdes, "cbf");
+ ITupleReference prefixHighKey = TupleUtils.createTuple(fieldSerdes, "cc7");
+
+ for (BTreeLeafFrameType leafFrameType : leafFrameTypesToTest) {
+ runTest(fieldSerdes, 2, leafFrameType, lowKey, highKey, prefixLowKey, prefixHighKey);
+ }
+ }
+}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexTestUtils.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexTestUtils.java
new file mode 100644
index 0000000..36e5395
--- /dev/null
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexTestUtils.java
@@ -0,0 +1,380 @@
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import static org.junit.Assert.fail;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.NavigableSet;
+import java.util.Random;
+import java.util.TreeSet;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeDuplicateKeyException;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
+import edu.uci.ics.hyracks.storage.am.common.CheckTuple;
+import edu.uci.ics.hyracks.storage.am.common.ITreeIndexTestContext;
+import edu.uci.ics.hyracks.storage.am.common.TreeIndexTestUtils;
+import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+
+@SuppressWarnings("rawtypes")
+public class OrderedIndexTestUtils extends TreeIndexTestUtils {
+ private static final Logger LOGGER = Logger.getLogger(OrderedIndexTestUtils.class.getName());
+
+ private static void compareActualAndExpected(ITupleReference actual, CheckTuple expected,
+ ISerializerDeserializer[] fieldSerdes) throws HyracksDataException {
+ for (int i = 0; i < fieldSerdes.length; i++) {
+ ByteArrayInputStream inStream = new ByteArrayInputStream(actual.getFieldData(i), actual.getFieldStart(i),
+ actual.getFieldLength(i));
+ DataInput dataIn = new DataInputStream(inStream);
+ Object actualObj = fieldSerdes[i].deserialize(dataIn);
+ if (!actualObj.equals(expected.get(i))) {
+ fail("Actual and expected fields do not match on field " + i + ".\nExpected: " + expected.get(i)
+ + "\nActual : " + actualObj);
+ }
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ // Create a new TreeSet containing the elements satisfying the prefix
+ // search.
+ // Implementing prefix search by changing compareTo() in CheckTuple does not
+ // work.
+ public static TreeSet<CheckTuple> getPrefixExpectedSubset(TreeSet<CheckTuple> checkTuples, CheckTuple lowKey,
+ CheckTuple highKey) {
+ TreeSet<CheckTuple> expectedSubset = new TreeSet<CheckTuple>();
+ Iterator<CheckTuple> iter = checkTuples.iterator();
+ while (iter.hasNext()) {
+ CheckTuple t = iter.next();
+ boolean geLowKey = true;
+ boolean leHighKey = true;
+ for (int i = 0; i < lowKey.getNumKeys(); i++) {
+ if (t.get(i).compareTo(lowKey.get(i)) < 0) {
+ geLowKey = false;
+ break;
+ }
+ }
+ for (int i = 0; i < highKey.getNumKeys(); i++) {
+ if (t.get(i).compareTo(highKey.get(i)) > 0) {
+ leHighKey = false;
+ break;
+ }
+ }
+ if (geLowKey && leHighKey) {
+ expectedSubset.add(t);
+ }
+ }
+ return expectedSubset;
+ }
+
+ @SuppressWarnings("unchecked")
+ public void checkRangeSearch(ITreeIndexTestContext ctx, ITupleReference lowKey, ITupleReference highKey,
+ boolean lowKeyInclusive, boolean highKeyInclusive) throws Exception {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Testing Range Search.");
+ }
+ MultiComparator lowKeyCmp = BTreeUtils.getSearchMultiComparator(ctx.getComparatorFactories(), lowKey);
+ MultiComparator highKeyCmp = BTreeUtils.getSearchMultiComparator(ctx.getComparatorFactories(), highKey);
+ ITreeIndexCursor searchCursor = ctx.getIndexAccessor().createSearchCursor();
+ RangePredicate rangePred = new RangePredicate(lowKey, highKey, lowKeyInclusive, highKeyInclusive, lowKeyCmp,
+ highKeyCmp);
+ ctx.getIndexAccessor().search(searchCursor, rangePred);
+ // Get the subset of elements from the expected set within given key
+ // range.
+ CheckTuple lowKeyCheck = createCheckTupleFromTuple(lowKey, ctx.getFieldSerdes(), lowKeyCmp.getKeyFieldCount());
+ CheckTuple highKeyCheck = createCheckTupleFromTuple(highKey, ctx.getFieldSerdes(),
+ highKeyCmp.getKeyFieldCount());
+ NavigableSet<CheckTuple> expectedSubset = null;
+ if (lowKeyCmp.getKeyFieldCount() < ctx.getKeyFieldCount()
+ || highKeyCmp.getKeyFieldCount() < ctx.getKeyFieldCount()) {
+ // Searching on a key prefix (low key or high key or both).
+ expectedSubset = getPrefixExpectedSubset((TreeSet<CheckTuple>) ctx.getCheckTuples(), lowKeyCheck,
+ highKeyCheck);
+ } else {
+ // Searching on all key fields.
+ expectedSubset = ((TreeSet<CheckTuple>) ctx.getCheckTuples()).subSet(lowKeyCheck, lowKeyInclusive,
+ highKeyCheck, highKeyInclusive);
+ }
+ Iterator<CheckTuple> checkIter = expectedSubset.iterator();
+ int actualCount = 0;
+ try {
+ while (searchCursor.hasNext()) {
+ if (!checkIter.hasNext()) {
+ fail("Range search returned more answers than expected.\nExpected: " + expectedSubset.size());
+ }
+ searchCursor.next();
+ CheckTuple expectedTuple = checkIter.next();
+ ITupleReference tuple = searchCursor.getTuple();
+ compareActualAndExpected(tuple, expectedTuple, ctx.getFieldSerdes());
+ actualCount++;
+ }
+ if (actualCount < expectedSubset.size()) {
+ fail("Range search returned fewer answers than expected.\nExpected: " + expectedSubset.size()
+ + "\nActual : " + actualCount);
+ }
+ } finally {
+ searchCursor.close();
+ }
+ }
+
+ public void checkPointSearches(ITreeIndexTestContext ictx) throws Exception {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Testing Point Searches On All Expected Keys.");
+ }
+ OrderedIndexTestContext ctx = (OrderedIndexTestContext) ictx;
+ ITreeIndexCursor searchCursor = ctx.getIndexAccessor().createSearchCursor();
+
+ ArrayTupleBuilder lowKeyBuilder = new ArrayTupleBuilder(ctx.getKeyFieldCount());
+ ArrayTupleReference lowKey = new ArrayTupleReference();
+ ArrayTupleBuilder highKeyBuilder = new ArrayTupleBuilder(ctx.getKeyFieldCount());
+ ArrayTupleReference highKey = new ArrayTupleReference();
+ RangePredicate rangePred = new RangePredicate(lowKey, highKey, true, true, null, null);
+
+ // Iterate through expected tuples, and perform a point search in the
+ // BTree to verify the tuple can be reached.
+ for (CheckTuple checkTuple : ctx.getCheckTuples()) {
+ createTupleFromCheckTuple(checkTuple, lowKeyBuilder, lowKey, ctx.getFieldSerdes());
+ createTupleFromCheckTuple(checkTuple, highKeyBuilder, highKey, ctx.getFieldSerdes());
+ MultiComparator lowKeyCmp = BTreeUtils.getSearchMultiComparator(ctx.getComparatorFactories(), lowKey);
+ MultiComparator highKeyCmp = BTreeUtils.getSearchMultiComparator(ctx.getComparatorFactories(), highKey);
+
+ rangePred.setLowKey(lowKey, true);
+ rangePred.setHighKey(highKey, true);
+ rangePred.setLowKeyComparator(lowKeyCmp);
+ rangePred.setHighKeyComparator(highKeyCmp);
+
+ ctx.getIndexAccessor().search(searchCursor, rangePred);
+
+ try {
+ // We expect exactly one answer.
+ if (searchCursor.hasNext()) {
+ searchCursor.next();
+ ITupleReference tuple = searchCursor.getTuple();
+ compareActualAndExpected(tuple, checkTuple, ctx.getFieldSerdes());
+ } else {
+ fail("Point search did not return an answer for an expected key.");
+ }
+ if (searchCursor.hasNext()) {
+ fail("Point search returned more than one answer.");
+ }
+ } finally {
+ searchCursor.close();
+ }
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ public void insertStringTuples(ITreeIndexTestContext ctx, int numTuples, Random rnd) throws Exception {
+ int fieldCount = ctx.getFieldCount();
+ int numKeyFields = ctx.getKeyFieldCount();
+ String[] fieldValues = new String[fieldCount];
+ for (int i = 0; i < numTuples; i++) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
+ LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
+ }
+ }
+ // Set keys.
+ for (int j = 0; j < numKeyFields; j++) {
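+ // Use a random key length between 1 and 10 characters.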
+ int length = (Math.abs(rnd.nextInt()) % 10) + 1;
+ fieldValues[j] = getRandomString(length, rnd);
+ }
+ // Set values.
+ for (int j = numKeyFields; j < fieldCount; j++) {
+ fieldValues[j] = getRandomString(5, rnd);
+ }
+ TupleUtils.createTuple(ctx.getTupleBuilder(), ctx.getTuple(), ctx.getFieldSerdes(), (Object[]) fieldValues);
+ try {
+ ctx.getIndexAccessor().insert(ctx.getTuple());
+ // Set expected values. Do this only after insertion succeeds
+ // because we ignore duplicate keys.
+ ctx.insertCheckTuple(createStringCheckTuple(fieldValues, ctx.getKeyFieldCount()), ctx.getCheckTuples());
+ } catch (BTreeDuplicateKeyException e) {
+ // Ignore duplicate key insertions.
+ }
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ public void bulkLoadStringTuples(ITreeIndexTestContext ctx, int numTuples, Random rnd) throws Exception {
+ int fieldCount = ctx.getFieldCount();
+ int numKeyFields = ctx.getKeyFieldCount();
+ String[] fieldValues = new String[fieldCount];
+ TreeSet<CheckTuple> tmpCheckTuples = new TreeSet<CheckTuple>();
+ for (int i = 0; i < numTuples; i++) {
+ // Set keys.
+ for (int j = 0; j < numKeyFields; j++) {
+ int length = (Math.abs(rnd.nextInt()) % 10) + 1;
+ fieldValues[j] = getRandomString(length, rnd);
+ }
+ // Set values.
+ for (int j = numKeyFields; j < fieldCount; j++) {
+ fieldValues[j] = getRandomString(5, rnd);
+ }
+ // Set expected values. We also use these as the pre-sorted stream
+ // for bulk loading.
+ ctx.insertCheckTuple(createStringCheckTuple(fieldValues, ctx.getKeyFieldCount()), tmpCheckTuples);
+ }
+ bulkLoadCheckTuples(ctx, tmpCheckTuples);
+
+ // Add tmpCheckTuples to ctx check tuples for comparing searches.
+ for (CheckTuple checkTuple : tmpCheckTuples) {
+ ctx.insertCheckTuple(checkTuple, ctx.getCheckTuples());
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ public void updateTuples(ITreeIndexTestContext ictx, int numTuples, Random rnd) throws Exception {
+ OrderedIndexTestContext ctx = (OrderedIndexTestContext) ictx;
+ int fieldCount = ctx.getFieldCount();
+ int keyFieldCount = ctx.getKeyFieldCount();
+ // There is nothing to update when all fields are key fields, since only non-key fields can be updated.
+ if (fieldCount == keyFieldCount) {
+ return;
+ }
+ ArrayTupleBuilder updateTupleBuilder = new ArrayTupleBuilder(fieldCount);
+ ArrayTupleReference updateTuple = new ArrayTupleReference();
+ int numCheckTuples = ctx.getCheckTuples().size();
+ // Copy CheckTuple references into array, so we can randomly pick from
+ // there.
+ CheckTuple[] checkTuples = new CheckTuple[numCheckTuples];
+ int idx = 0;
+ for (CheckTuple checkTuple : ctx.getCheckTuples()) {
+ checkTuples[idx++] = checkTuple;
+ }
+ for (int i = 0; i < numTuples && numCheckTuples > 0; i++) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
+ LOGGER.info("Updating Tuple " + (i + 1) + "/" + numTuples);
+ }
+ }
+ int checkTupleIdx = Math.abs(rnd.nextInt() % numCheckTuples);
+ CheckTuple checkTuple = checkTuples[checkTupleIdx];
+ // Update check tuple's non-key fields.
+ for (int j = keyFieldCount; j < fieldCount; j++) {
+ Comparable newValue = getRandomUpdateValue(ctx.getFieldSerdes()[j], rnd);
+ checkTuple.set(j, newValue);
+ }
+
+ createTupleFromCheckTuple(checkTuple, updateTupleBuilder, updateTuple, ctx.getFieldSerdes());
+ ctx.getIndexAccessor().update(updateTuple);
+
+ // Swap with last "valid" CheckTuple.
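+ // This ensures each check tuple is updated at most once per call.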
+ CheckTuple tmp = checkTuples[numCheckTuples - 1];
+ checkTuples[numCheckTuples - 1] = checkTuple;
+ checkTuples[checkTupleIdx] = tmp;
+ numCheckTuples--;
+ }
+ }
+
+ public CheckTuple createStringCheckTuple(String[] fieldValues, int numKeyFields) {
+ CheckTuple<String> checkTuple = new CheckTuple<String>(fieldValues.length, numKeyFields);
+ for (String s : fieldValues) {
+ checkTuple.add(s);
+ }
+ return checkTuple;
+ }
+
+ private static Comparable getRandomUpdateValue(ISerializerDeserializer serde, Random rnd) {
+ if (serde instanceof IntegerSerializerDeserializer) {
+ return Integer.valueOf(rnd.nextInt());
+ } else if (serde instanceof UTF8StringSerializerDeserializer) {
+ return getRandomString(10, rnd);
+ }
+ return null;
+ }
+
+ public static String getRandomString(int length, Random rnd) {
+ String s = Long.toHexString(Double.doubleToLongBits(rnd.nextDouble()));
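+ // Draw up to 'length' random characters from the hex string (so the result may be shorter than 'length').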
+ StringBuilder strBuilder = new StringBuilder();
+ for (int i = 0; i < s.length() && i < length; i++) {
+ strBuilder.append(s.charAt(Math.abs(rnd.nextInt()) % s.length()));
+ }
+ return strBuilder.toString();
+ }
+
+ @Override
+ protected CheckTuple createCheckTuple(int numFields, int numKeyFields) {
+ return new CheckTuple(numFields, numKeyFields);
+ }
+
+ @Override
+ protected ISearchPredicate createNullSearchPredicate() {
+ return new RangePredicate(null, null, true, true, null, null);
+ }
+
+ @Override
+ public void checkExpectedResults(ITreeIndexCursor cursor, Collection checkTuples,
+ ISerializerDeserializer[] fieldSerdes, int keyFieldCount, Iterator<CheckTuple> checkIter) throws Exception {
+ int actualCount = 0;
+ try {
+ while (cursor.hasNext()) {
+ if (!checkIter.hasNext()) {
+ fail("Ordered scan returned more answers than expected.\nExpected: " + checkTuples.size());
+ }
+ cursor.next();
+ CheckTuple expectedTuple = checkIter.next();
+ ITupleReference tuple = cursor.getTuple();
+ compareActualAndExpected(tuple, expectedTuple, fieldSerdes);
+ actualCount++;
+ }
+ if (actualCount < checkTuples.size()) {
+ fail("Ordered scan returned fewer answers than expected.\nExpected: " + checkTuples.size()
+ + "\nActual : " + actualCount);
+ }
+ } finally {
+ cursor.close();
+ }
+
+ }
+
+ @Override
+ protected CheckTuple createIntCheckTuple(int[] fieldValues, int numKeyFields) {
+ CheckTuple<Integer> checkTuple = new CheckTuple<Integer>(fieldValues.length, numKeyFields);
+ for (int v : fieldValues) {
+ checkTuple.add(v);
+ }
+ return checkTuple;
+ }
+
+ @Override
+ protected void setIntKeyFields(int[] fieldValues, int numKeyFields, int maxValue, Random rnd) {
+ for (int j = 0; j < numKeyFields; j++) {
+ fieldValues[j] = rnd.nextInt() % maxValue;
+ }
+ }
+
+ @Override
+ protected Collection createCheckTuplesCollection() {
+ return new TreeSet<CheckTuple>();
+ }
+
+ @Override
+ protected ArrayTupleBuilder createDeleteTupleBuilder(ITreeIndexTestContext ctx) {
+ return new ArrayTupleBuilder(ctx.getKeyFieldCount());
+ }
+
+ @Override
+ protected boolean checkDiskOrderScanResult(ITupleReference tuple, CheckTuple checkTuple, ITreeIndexTestContext ctx) throws HyracksDataException {
+ @SuppressWarnings("unchecked")
+ TreeSet<CheckTuple> checkTuples = (TreeSet<CheckTuple>) ctx.getCheckTuples();
+ CheckTuple matchingCheckTuple = checkTuples.floor(checkTuple);
+ if (matchingCheckTuple == null) {
+ return false;
+ }
+ compareActualAndExpected(tuple, matchingCheckTuple, ctx.getFieldSerdes());
+ return true;
+ }
+}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexUpdateTest.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexUpdateTest.java
new file mode 100644
index 0000000..65b2ade
--- /dev/null
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/btree/OrderedIndexUpdateTest.java
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+
+@SuppressWarnings("rawtypes")
+public abstract class OrderedIndexUpdateTest extends OrderedIndexTestDriver {
+
+ private final OrderedIndexTestUtils orderedIndexTestUtils;
+
+ public OrderedIndexUpdateTest(BTreeLeafFrameType[] leafFrameTypesToTest) {
+ super(leafFrameTypesToTest);
+ this.orderedIndexTestUtils = new OrderedIndexTestUtils();
+ }
+
+ private static final int numUpdateRounds = 3;
+
+ @Override
+ protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType,
+ ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey)
+ throws Exception {
+ // There is nothing to update when all fields are key fields, since only non-key fields can be updated.
+ if (fieldSerdes.length == numKeys) {
+ return;
+ }
+ OrderedIndexTestContext ctx = createTestContext(fieldSerdes, numKeys, leafType);
+ // We assume all fieldSerdes are of the same type. Check the first one
+ // to determine which field types to generate.
+ if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
+ orderedIndexTestUtils.insertIntTuples(ctx, numTuplesToInsert, getRandom());
+ } else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
+ orderedIndexTestUtils.insertStringTuples(ctx, numTuplesToInsert, getRandom());
+ }
+ int numTuplesPerUpdateRound = (int) Math.ceil((float) ctx.getCheckTuples().size() / (float) numUpdateRounds);
+ for (int j = 0; j < numUpdateRounds; j++) {
+ orderedIndexTestUtils.updateTuples(ctx, numTuplesPerUpdateRound, getRandom());
+ orderedIndexTestUtils.checkPointSearches(ctx);
+ orderedIndexTestUtils.checkScan(ctx);
+ orderedIndexTestUtils.checkDiskOrderScan(ctx);
+ orderedIndexTestUtils.checkRangeSearch(ctx, lowKey, highKey, true, true);
+ if (prefixLowKey != null && prefixHighKey != null) {
+ orderedIndexTestUtils.checkRangeSearch(ctx, prefixLowKey, prefixHighKey, true, true);
+ }
+ }
+ }
+
+ @Override
+ protected String getTestOpName() {
+ return "Update";
+ }
+}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/AbstractTreeIndexTestWorker.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/AbstractTreeIndexTestWorker.java
new file mode 100644
index 0000000..e7fee30
--- /dev/null
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/AbstractTreeIndexTestWorker.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common;
+
+import java.util.Random;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
+import edu.uci.ics.hyracks.storage.am.common.datagen.TupleBatch;
+
+public abstract class AbstractTreeIndexTestWorker extends Thread implements ITreeIndexTestWorker {
+ private Random rnd = new Random();
+ private final DataGenThread dataGen;
+ private final TestOperationSelector opSelector;
+ private final int numBatches;
+
+ protected final ITreeIndexAccessor indexAccessor;
+
+ public AbstractTreeIndexTestWorker(DataGenThread dataGen, TestOperationSelector opSelector, ITreeIndex index, int numBatches) {
+ this.dataGen = dataGen;
+ this.opSelector = opSelector;
+ this.numBatches = numBatches;
+ indexAccessor = index.createAccessor();
+ }
+
+ @Override
+ public void run() {
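+ // Process this worker's share of tuple batches, applying a randomly selected operation to each tuple.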
+ try {
+ for (int i = 0; i < numBatches; i++) {
+ TupleBatch batch = dataGen.getBatch();
+ for (int j = 0; j < batch.size(); j++) {
+ TestOperation op = opSelector.getOp(rnd.nextInt());
+ ITupleReference tuple = batch.get(j);
+ performOp(tuple, op);
+ }
+ dataGen.releaseBatch(batch);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/CheckTuple.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/CheckTuple.java
new file mode 100644
index 0000000..4b4b90b
--- /dev/null
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/CheckTuple.java
@@ -0,0 +1,73 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common;
+
+@SuppressWarnings({"rawtypes", "unchecked"})
+public class CheckTuple<T extends Comparable<T>> implements Comparable<CheckTuple<T>> {
+ protected final int numKeys;
+ protected final Comparable[] tuple;
+ protected int pos;
+
+ public CheckTuple(int numFields, int numKeys) {
+ this.numKeys = numKeys;
+ this.tuple = new Comparable[numFields];
+ pos = 0;
+ }
+
+ public void add(T e) {
+ tuple[pos++] = e;
+ }
+
+ @Override
+ public int compareTo(CheckTuple<T> other) {
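+ // Only the key fields determine the ordering; payload fields are ignored.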
+ for (int i = 0; i < numKeys; i++) {
+ int cmp = tuple[i].compareTo(other.get(i));
+ if (cmp != 0) {
+ return cmp;
+ }
+ }
+ return 0;
+ }
+
+ public T get(int idx) {
+ return (T)tuple[idx];
+ }
+
+ public void set(int idx, T e) {
+ tuple[idx] = e;
+ }
+
+ public int size() {
+ return tuple.length;
+ }
+
+ public int getNumKeys() {
+ return numKeys;
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder strBuilder = new StringBuilder();
+ for (int i = 0; i < tuple.length; i++) {
+ strBuilder.append(tuple[i].toString());
+ if (i != tuple.length-1) {
+ strBuilder.append(" ");
+ }
+ }
+ return strBuilder.toString();
+ }
+}
\ No newline at end of file
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/ITreeIndexTestContext.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/ITreeIndexTestContext.java
new file mode 100644
index 0000000..e4a40c1
--- /dev/null
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/ITreeIndexTestContext.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common;
+
+import java.util.Collection;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+
+@SuppressWarnings("rawtypes")
+public interface ITreeIndexTestContext<T extends CheckTuple> {
+ public int getFieldCount();
+
+ public int getKeyFieldCount();
+
+ public ISerializerDeserializer[] getFieldSerdes();
+
+ public IBinaryComparatorFactory[] getComparatorFactories();
+
+ public ITreeIndexAccessor getIndexAccessor();
+
+ public ITreeIndex getIndex();
+
+ public ArrayTupleReference getTuple();
+
+ public ArrayTupleBuilder getTupleBuilder();
+
+ public void insertCheckTuple(T checkTuple, Collection<T> checkTuples);
+
+ public void deleteCheckTuple(T checkTuple, Collection<T> checkTuples);
+
+ public Collection<T> getCheckTuples();
+
+}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/ITreeIndexTestWorker.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/ITreeIndexTestWorker.java
new file mode 100644
index 0000000..62d30bb
--- /dev/null
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/ITreeIndexTestWorker.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+
+public interface ITreeIndexTestWorker {
+ void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException, TreeIndexException;
+}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/ITreeIndexTestWorkerFactory.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/ITreeIndexTestWorkerFactory.java
new file mode 100644
index 0000000..64b5aea
--- /dev/null
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/ITreeIndexTestWorkerFactory.java
@@ -0,0 +1,23 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common;
+
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
+
+public interface ITreeIndexTestWorkerFactory {
+ public AbstractTreeIndexTestWorker create(DataGenThread dataGen, TestOperationSelector opSelector, ITreeIndex index, int numBatches);
+}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TestOperationSelector.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TestOperationSelector.java
new file mode 100644
index 0000000..d05e80e
--- /dev/null
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TestOperationSelector.java
@@ -0,0 +1,82 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common;
+
+import java.util.Arrays;
+
+public class TestOperationSelector {
+
+ public static enum TestOperation {
+ INSERT,
+ DELETE,
+ UPDATE,
+ POINT_SEARCH,
+ RANGE_SEARCH,
+ SCAN,
+ DISKORDER_SCAN,
+ MERGE
+ }
+
+ private final TestOperation[] ops;
+ private final int[] opRanges;
+
+ public TestOperationSelector(TestOperation[] ops, float[] opProbs) {
+ sanityCheck(ops, opProbs);
+ this.ops = ops;
+ this.opRanges = getOpRanges(opProbs);
+ }
+
+ private void sanityCheck(TestOperation[] ops, float[] opProbs) {
+ if (ops.length == 0) {
+ throw new RuntimeException("Empty op array.");
+ }
+ if (opProbs.length == 0) {
+ throw new RuntimeException("Empty op probabilities.");
+ }
+ if (ops.length != opProbs.length) {
+ throw new RuntimeException("Ops and op probabilities have unequal length.");
+ }
+ float sum = 0.0f;
+ for (int i = 0; i < opProbs.length; i++) {
+ sum += opProbs[i];
+ }
+ // Compare with a small tolerance; exact float equality would reject
+ // probability sets such as {0.3f, 0.3f, 0.4f} due to rounding.
+ if (Math.abs(sum - 1.0f) > 0.0001f) {
+ throw new RuntimeException("Op probabilities don't add up to 1.");
+ }
+ }
+
+ private int[] getOpRanges(float[] opProbabilities) {
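+ // Turn the probabilities into cumulative upper bounds over [0, Integer.MAX_VALUE];
+ // getOp() picks the first range whose bound is not below the given random value.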
+ int[] opRanges = new int[opProbabilities.length];
+ if (opRanges.length > 1) {
+ opRanges[0] = (int) Math.floor(Integer.MAX_VALUE * opProbabilities[0]);
+ for (int i = 1; i < opRanges.length - 1; i++) {
+ opRanges[i] = opRanges[i - 1] + (int) Math.floor(Integer.MAX_VALUE * opProbabilities[i]);
+ }
+ opRanges[opRanges.length - 1] = Integer.MAX_VALUE;
+ } else {
+ opRanges[0] = Integer.MAX_VALUE;
+ }
+ return opRanges;
+ }
+
+ public TestOperation getOp(int randomInt) {
+ // Clear the sign bit so that negative random values also map into
+ // [0, Integer.MAX_VALUE]; otherwise they would all select ops[0],
+ // skewing the configured operation probabilities.
+ int ix = Arrays.binarySearch(opRanges, randomInt & Integer.MAX_VALUE);
+ if (ix < 0) {
+ ix = -ix - 1;
+ }
+ return ops[ix];
+ }
+}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TestWorkloadConf.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TestWorkloadConf.java
new file mode 100644
index 0000000..2437514
--- /dev/null
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TestWorkloadConf.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common;
+
+import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
+
+public class TestWorkloadConf {
+ public final TestOperation[] ops;
+ public final float[] opProbs;
+
+ public TestWorkloadConf(TestOperation[] ops, float[] opProbs) {
+ this.ops = ops;
+ this.opProbs = opProbs;
+ }
+
+ public String toString() {
+ StringBuilder strBuilder = new StringBuilder();
+ for (TestOperation op : ops) {
+ strBuilder.append(op.toString());
+ strBuilder.append(',');
+ }
+ strBuilder.deleteCharAt(strBuilder.length() - 1);
+ return strBuilder.toString();
+ }
+}
\ No newline at end of file
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TreeIndexMultiThreadTestDriver.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TreeIndexMultiThreadTestDriver.java
new file mode 100644
index 0000000..8c1d06f
--- /dev/null
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TreeIndexMultiThreadTestDriver.java
@@ -0,0 +1,88 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
+
+@SuppressWarnings("rawtypes")
+public class TreeIndexMultiThreadTestDriver {
+ private static final int RANDOM_SEED = 50;
+ // Means no additional payload. Only the specified fields.
+ private static final int PAYLOAD_SIZE = 0;
+ private final TestOperationSelector opSelector;
+ private final ISerializerDeserializer[] fieldSerdes;
+ private final ITreeIndex index;
+ private final ITreeIndexTestWorkerFactory workerFactory;
+
+ public TreeIndexMultiThreadTestDriver(ITreeIndex index, ITreeIndexTestWorkerFactory workerFactory,
+ ISerializerDeserializer[] fieldSerdes, TestOperation[] ops, float[] opProbs) {
+ this.index = index;
+ this.workerFactory = workerFactory;
+ this.fieldSerdes = fieldSerdes;
+ this.opSelector = new TestOperationSelector(ops, opProbs);
+ }
+
+ public void init(int fileId) throws HyracksDataException {
+ index.create(fileId);
+ index.open(fileId);
+ }
+
+ public long[] run(int numThreads, int numRepeats, int numOps, int batchSize) throws InterruptedException, TreeIndexException {
+ int numBatches = numOps / batchSize;
+ int threadNumBatches = numBatches / numThreads;
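+ // Split the batches evenly among the worker threads; any remainder is left unconsumed.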
+ if (threadNumBatches <= 0) {
+ throw new TreeIndexException("Inconsistent parameters given. Need at least one batch per thread.");
+ }
+ long[] times = new long[numRepeats];
+ for (int i = 0; i < numRepeats; i++) {
+ DataGenThread dataGen = createDatagenThread(numThreads, numBatches, batchSize);
+ dataGen.start();
+ // Wait until the tupleBatchQueue is filled to capacity.
+ while (dataGen.tupleBatchQueue.remainingCapacity() != 0 && dataGen.tupleBatchQueue.size() != numBatches) {
+ Thread.sleep(10);
+ }
+
+ // Start worker threads.
+ AbstractTreeIndexTestWorker[] workers = new AbstractTreeIndexTestWorker[numThreads];
+ long start = System.currentTimeMillis();
+ for (int j = 0; j < numThreads; j++) {
+ workers[j] = workerFactory.create(dataGen, opSelector, index, threadNumBatches);
+ workers[j].start();
+ }
+ // Join worker threads.
+ for (int j = 0; j < numThreads; j++) {
+ workers[j].join();
+ }
+ long end = System.currentTimeMillis();
+ times[i] = end - start;
+ }
+ return times;
+ }
+
+ public void deinit() throws HyracksDataException {
+ index.close();
+ }
+
+ // To allow subclasses to override the data gen params.
+ public DataGenThread createDatagenThread(int numThreads, int numBatches, int batchSize) {
+ return new DataGenThread(numThreads, numBatches, batchSize, fieldSerdes, PAYLOAD_SIZE, RANDOM_SEED, 2*numThreads, false);
+ }
+}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TreeIndexTestContext.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TreeIndexTestContext.java
new file mode 100644
index 0000000..934d907
--- /dev/null
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TreeIndexTestContext.java
@@ -0,0 +1,80 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.common;
+
+import java.util.Collection;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+
+@SuppressWarnings("rawtypes")
+public abstract class TreeIndexTestContext<T extends CheckTuple> implements ITreeIndexTestContext<T> {
+ protected final ISerializerDeserializer[] fieldSerdes;
+ protected final ITreeIndex treeIndex;
+ protected final ArrayTupleBuilder tupleBuilder;
+ protected final ArrayTupleReference tuple = new ArrayTupleReference();
+ protected final ITreeIndexAccessor indexAccessor;
+
+ public TreeIndexTestContext(ISerializerDeserializer[] fieldSerdes, ITreeIndex treeIndex) {
+ this.fieldSerdes = fieldSerdes;
+ this.treeIndex = treeIndex;
+ this.indexAccessor = treeIndex.createAccessor();
+ this.tupleBuilder = new ArrayTupleBuilder(fieldSerdes.length);
+ }
+
+ @Override
+ public int getFieldCount() {
+ return fieldSerdes.length;
+ }
+
+ @Override
+ public ITreeIndexAccessor getIndexAccessor() {
+ return indexAccessor;
+ }
+
+ @Override
+ public ArrayTupleReference getTuple() {
+ return tuple;
+ }
+
+ @Override
+ public ArrayTupleBuilder getTupleBuilder() {
+ return tupleBuilder;
+ }
+
+ @Override
+ public ISerializerDeserializer[] getFieldSerdes() {
+ return fieldSerdes;
+ }
+
+ @Override
+ public ITreeIndex getIndex() {
+ return treeIndex;
+ }
+
+ @Override
+ public void insertCheckTuple(T checkTuple, Collection<T> checkTuples) {
+ checkTuples.add(checkTuple);
+ }
+
+ @Override
+ public void deleteCheckTuple(T checkTuple, Collection<T> checkTuples) {
+ checkTuples.remove(checkTuple);
+ }
+}
\ No newline at end of file
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TreeIndexTestUtils.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TreeIndexTestUtils.java
new file mode 100644
index 0000000..4c1d6f8
--- /dev/null
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/common/TreeIndexTestUtils.java
@@ -0,0 +1,245 @@
+package edu.uci.ics.hyracks.storage.am.common;
+
+import static org.junit.Assert.fail;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.Random;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
+import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+
+@SuppressWarnings("rawtypes")
+public abstract class TreeIndexTestUtils {
+ private static final Logger LOGGER = Logger.getLogger(TreeIndexTestUtils.class.getName());
+
+ protected abstract CheckTuple createCheckTuple(int numFields, int numKeyFields);
+
+ protected abstract ISearchPredicate createNullSearchPredicate();
+
+ public abstract void checkExpectedResults(ITreeIndexCursor cursor, Collection checkTuples,
+ ISerializerDeserializer[] fieldSerdes, int keyFieldCount, Iterator<CheckTuple> checkIter) throws Exception;
+
+ protected abstract CheckTuple createIntCheckTuple(int[] fieldValues, int numKeyFields);
+
+ protected abstract void setIntKeyFields(int[] fieldValues, int numKeyFields, int maxValue, Random rnd);
+
+ protected abstract Collection createCheckTuplesCollection();
+
+ protected abstract ArrayTupleBuilder createDeleteTupleBuilder(ITreeIndexTestContext ctx);
+
+ // See if tuple with corresponding checkTuple exists in ctx.checkTuples.
+ protected abstract boolean checkDiskOrderScanResult(ITupleReference tuple, CheckTuple checkTuple, ITreeIndexTestContext ctx) throws HyracksDataException;
+
+ @SuppressWarnings("unchecked")
+ public static void createTupleFromCheckTuple(CheckTuple checkTuple, ArrayTupleBuilder tupleBuilder,
+ ArrayTupleReference tuple, ISerializerDeserializer[] fieldSerdes) throws HyracksDataException {
+ int fieldCount = tupleBuilder.getFieldEndOffsets().length;
+ DataOutput dos = tupleBuilder.getDataOutput();
+ tupleBuilder.reset();
+ for (int i = 0; i < fieldCount; i++) {
+ fieldSerdes[i].serialize(checkTuple.get(i), dos);
+ tupleBuilder.addFieldEndOffset();
+ }
+ tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
+ }
+
+ @SuppressWarnings("unchecked")
+ public CheckTuple createCheckTupleFromTuple(ITupleReference tuple, ISerializerDeserializer[] fieldSerdes,
+ int numKeys) throws HyracksDataException {
+ CheckTuple checkTuple = createCheckTuple(fieldSerdes.length, numKeys);
+ int fieldCount = Math.min(fieldSerdes.length, tuple.getFieldCount());
+ for (int i = 0; i < fieldCount; i++) {
+ ByteArrayInputStream inStream = new ByteArrayInputStream(tuple.getFieldData(i), tuple.getFieldStart(i),
+ tuple.getFieldLength(i));
+ DataInput dataIn = new DataInputStream(inStream);
+ Comparable fieldObj = (Comparable) fieldSerdes[i].deserialize(dataIn);
+ checkTuple.add(fieldObj);
+ }
+ return checkTuple;
+ }
+
+ @SuppressWarnings("unchecked")
+ public void checkScan(ITreeIndexTestContext ctx) throws Exception {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Testing Scan.");
+ }
+ ITreeIndexCursor scanCursor = ctx.getIndexAccessor().createSearchCursor();
+ ISearchPredicate nullPred = createNullSearchPredicate();
+ ctx.getIndexAccessor().search(scanCursor, nullPred);
+ Iterator<CheckTuple> checkIter = ctx.getCheckTuples().iterator();
+ checkExpectedResults(scanCursor, ctx.getCheckTuples(), ctx.getFieldSerdes(), ctx.getKeyFieldCount(), checkIter);
+ }
+
+ public void checkDiskOrderScan(ITreeIndexTestContext ctx) throws Exception {
+ try {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Testing Disk-Order Scan.");
+ }
+ ITreeIndexCursor diskOrderCursor = ctx.getIndexAccessor().createDiskOrderScanCursor();
+ ctx.getIndexAccessor().diskOrderScan(diskOrderCursor);
+ int actualCount = 0;
+ try {
+ while (diskOrderCursor.hasNext()) {
+ diskOrderCursor.next();
+ ITupleReference tuple = diskOrderCursor.getTuple();
+ CheckTuple checkTuple = createCheckTupleFromTuple(tuple, ctx.getFieldSerdes(),
+ ctx.getKeyFieldCount());
+ if (!checkDiskOrderScanResult(tuple, checkTuple, ctx)) {
+ fail("Disk-order scan returned unexpected answer: " + checkTuple.toString());
+ }
+ actualCount++;
+ }
+ if (actualCount < ctx.getCheckTuples().size()) {
+ fail("Disk-order scan returned fewer answers than expected.\nExpected: "
+ + ctx.getCheckTuples().size() + "\nActual : " + actualCount);
+ }
+ if (actualCount > ctx.getCheckTuples().size()) {
+ fail("Disk-order scan returned more answers than expected.\nExpected: "
+ + ctx.getCheckTuples().size() + "\nActual : " + actualCount);
+ }
+ } finally {
+ diskOrderCursor.close();
+ }
+ } catch (UnsupportedOperationException e) {
+ // Ignore exception because some indexes, e.g. the LSMTrees, don't
+ // support disk-order scan.
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Ignoring disk-order scan since it's not supported.");
+ }
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ public void insertIntTuples(ITreeIndexTestContext ctx, int numTuples, Random rnd) throws Exception {
+ int fieldCount = ctx.getFieldCount();
+ int numKeyFields = ctx.getKeyFieldCount();
+ int[] fieldValues = new int[ctx.getFieldCount()];
+ // Scale range of values according to number of keys.
+ // For example, for 2 keys we want the square root of numTuples, for 3
+ // keys the cube root of numTuples, etc.
+ int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
+ for (int i = 0; i < numTuples; i++) {
+ // Set keys.
+ setIntKeyFields(fieldValues, numKeyFields, maxValue, rnd);
+ // Set values.
+ for (int j = numKeyFields; j < fieldCount; j++) {
+ fieldValues[j] = j;
+ }
+ TupleUtils.createIntegerTuple(ctx.getTupleBuilder(), ctx.getTuple(), fieldValues);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
+ LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
+ }
+ }
+ try {
+ ctx.getIndexAccessor().insert(ctx.getTuple());
+ ctx.insertCheckTuple(createIntCheckTuple(fieldValues, ctx.getKeyFieldCount()), ctx.getCheckTuples());
+ } catch (TreeIndexException e) {
+ // We set expected values only after insertion succeeds because
+ // we ignore duplicate keys.
+ }
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ public void bulkLoadIntTuples(ITreeIndexTestContext ctx, int numTuples, Random rnd) throws Exception {
+ int fieldCount = ctx.getFieldCount();
+ int numKeyFields = ctx.getKeyFieldCount();
+ int[] fieldValues = new int[ctx.getFieldCount()];
+ int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
+ Collection<CheckTuple> tmpCheckTuples = createCheckTuplesCollection();
+ for (int i = 0; i < numTuples; i++) {
+ // Set keys.
+ setIntKeyFields(fieldValues, numKeyFields, maxValue, rnd);
+ // Set values.
+ for (int j = numKeyFields; j < fieldCount; j++) {
+ fieldValues[j] = j;
+ }
+
+ // Set expected values. (We also use these as the pre-sorted stream
+ // for bulk loading ordered indexes.)
+ ctx.insertCheckTuple(createIntCheckTuple(fieldValues, ctx.getKeyFieldCount()), tmpCheckTuples);
+ }
+ bulkLoadCheckTuples(ctx, tmpCheckTuples);
+
+ // Add tmpCheckTuples to ctx check tuples for comparing searches.
+ for (CheckTuple checkTuple : tmpCheckTuples) {
+ ctx.insertCheckTuple(checkTuple, ctx.getCheckTuples());
+ }
+ }
+
+ public static void bulkLoadCheckTuples(ITreeIndexTestContext ctx, Collection<CheckTuple> checkTuples)
+ throws HyracksDataException, TreeIndexException {
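+ // Callers of ordered-index bulk load must supply checkTuples already sorted (e.g., from a TreeSet).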
+ int fieldCount = ctx.getFieldCount();
+ int numTuples = checkTuples.size();
+ ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
+ ArrayTupleReference tuple = new ArrayTupleReference();
+ // Perform bulk load.
+ IIndexBulkLoadContext bulkLoadCtx = ctx.getIndex().beginBulkLoad(0.7f);
+ int c = 1;
+ for (CheckTuple checkTuple : checkTuples) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ // Guard against division by zero when loading fewer than 10 tuples.
+ if (c % (numTuples / Math.min(10, numTuples)) == 0) {
+ LOGGER.info("Bulk Loading Tuple " + c + "/" + numTuples);
+ }
+ }
+ createTupleFromCheckTuple(checkTuple, tupleBuilder, tuple, ctx.getFieldSerdes());
+ ctx.getIndex().bulkLoadAddTuple(tuple, bulkLoadCtx);
+ c++;
+ }
+ ctx.getIndex().endBulkLoad(bulkLoadCtx);
+ }
+
+ @SuppressWarnings("unchecked")
+ public void deleteTuples(ITreeIndexTestContext ctx, int numTuples, Random rnd) throws Exception {
+ ArrayTupleBuilder deleteTupleBuilder = createDeleteTupleBuilder(ctx);
+ ArrayTupleReference deleteTuple = new ArrayTupleReference();
+ int numCheckTuples = ctx.getCheckTuples().size();
+ // Copy CheckTuple references into array, so we can randomly pick from
+ // there.
+ CheckTuple[] checkTuples = new CheckTuple[numCheckTuples];
+ int idx = 0;
+ Iterator<CheckTuple> iter = ctx.getCheckTuples().iterator();
+ while (iter.hasNext()) {
+ CheckTuple checkTuple = iter.next();
+ checkTuples[idx++] = checkTuple;
+ }
+
+ for (int i = 0; i < numTuples && numCheckTuples > 0; i++) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
+ LOGGER.info("Deleting Tuple " + (i + 1) + "/" + numTuples);
+ }
+ }
+ int checkTupleIdx = Math.abs(rnd.nextInt() % numCheckTuples);
+ CheckTuple checkTuple = checkTuples[checkTupleIdx];
+ createTupleFromCheckTuple(checkTuple, deleteTupleBuilder, deleteTuple, ctx.getFieldSerdes());
+ ctx.getIndexAccessor().delete(deleteTuple);
+
+ // Remove check tuple from expected results.
+ ctx.deleteCheckTuple(checkTuple, ctx.getCheckTuples());
+
+ // Swap with last "valid" CheckTuple.
+ CheckTuple tmp = checkTuples[numCheckTuples - 1];
+ checkTuples[numCheckTuples - 1] = checkTuple;
+ checkTuples[checkTupleIdx] = tmp;
+ numCheckTuples--;
+ }
+ }
+
+}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeBulkLoadTest.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeBulkLoadTest.java
new file mode 100644
index 0000000..198ac58
--- /dev/null
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeBulkLoadTest.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+
+@SuppressWarnings("rawtypes")
+public abstract class AbstractRTreeBulkLoadTest extends AbstractRTreeTestDriver {
+
+ private final RTreeTestUtils rTreeTestUtils;
+ private final int bulkLoadRounds;
+
+ public AbstractRTreeBulkLoadTest(int bulkLoadRounds) {
+ this.bulkLoadRounds = bulkLoadRounds;
+ this.rTreeTestUtils = new RTreeTestUtils();
+ }
+
+ @Override
+ protected void runTest(ISerializerDeserializer[] fieldSerdes,
+ IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, ITupleReference key) throws Exception {
+ AbstractRTreeTestContext ctx = createTestContext(fieldSerdes, valueProviderFactories, numKeys);
+ for (int i = 0; i < bulkLoadRounds; i++) {
+ // We assume all fieldSerdes are of the same type. Check the first
+ // one to determine which field types to generate.
+ if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
+ rTreeTestUtils.bulkLoadIntTuples(ctx, numTuplesToInsert, getRandom());
+ } else if (fieldSerdes[0] instanceof DoubleSerializerDeserializer) {
+ rTreeTestUtils.bulkLoadDoubleTuples(ctx, numTuplesToInsert, getRandom());
+ }
+
+ rTreeTestUtils.checkScan(ctx);
+ rTreeTestUtils.checkDiskOrderScan(ctx);
+ rTreeTestUtils.checkRangeSearch(ctx, key);
+ }
+ ctx.getIndex().close();
+ }
+
+ @Override
+ protected String getTestOpName() {
+ return "BulkLoad";
+ }
+}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeDeleteTest.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeDeleteTest.java
new file mode 100644
index 0000000..e70f433
--- /dev/null
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeDeleteTest.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+
+@SuppressWarnings("rawtypes")
+public abstract class AbstractRTreeDeleteTest extends AbstractRTreeTestDriver {
+
+ private final RTreeTestUtils rTreeTestUtils;
+
+ private static final int numInsertRounds = 2;
+ private static final int numDeleteRounds = 2;
+
+ public AbstractRTreeDeleteTest() {
+ this.rTreeTestUtils = new RTreeTestUtils();
+ }
+
+ @Override
+ protected void runTest(ISerializerDeserializer[] fieldSerdes,
+ IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, ITupleReference key) throws Exception {
+ AbstractRTreeTestContext ctx = createTestContext(fieldSerdes, valueProviderFactories, numKeys);
+ for (int i = 0; i < numInsertRounds; i++) {
+ // We assume all fieldSerdes are of the same type. Check the first
+ // one to determine which field types to generate.
+ if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
+ rTreeTestUtils.insertIntTuples(ctx, numTuplesToInsert, getRandom());
+ } else if (fieldSerdes[0] instanceof DoubleSerializerDeserializer) {
+ rTreeTestUtils.insertDoubleTuples(ctx, numTuplesToInsert, getRandom());
+ }
+ int numTuplesPerDeleteRound = (int) Math
+ .ceil((float) ctx.getCheckTuples().size() / (float) numDeleteRounds);
+ for (int j = 0; j < numDeleteRounds; j++) {
+ rTreeTestUtils.deleteTuples(ctx, numTuplesPerDeleteRound, getRandom());
+ rTreeTestUtils.checkScan(ctx);
+ rTreeTestUtils.checkDiskOrderScan(ctx);
+ rTreeTestUtils.checkRangeSearch(ctx, key);
+ }
+ }
+ ctx.getIndex().close();
+ }
+
+ @Override
+ protected String getTestOpName() {
+ return "Delete";
+ }
+}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeExamplesTest.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeExamplesTest.java
new file mode 100644
index 0000000..7cb946a
--- /dev/null
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeExamplesTest.java
@@ -0,0 +1,567 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import java.util.Random;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.SearchPredicate;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
+
+@SuppressWarnings("rawtypes")
+public abstract class AbstractRTreeExamplesTest {
+ protected static final Logger LOGGER = Logger.getLogger(AbstractRTreeExamplesTest.class.getName());
+ protected final Random rnd = new Random(50);
+
+ protected abstract ITreeIndex createTreeIndex(ITypeTraits[] typeTraits,
+ IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
+ IPrimitiveValueProviderFactory[] valueProviderFactories) throws TreeIndexException;
+
+ protected abstract int getIndexFileId();
+
+ /**
+ * Two Dimensions Example.
+ *
+ * Create an RTree index of two dimensions, where the keys are of type
+ * integer, and the payload is two integer values. Fill the index with random
+ * values using insertions (not bulk load). Perform scans and a range search.
+ */
+ @Test
+ public void twoDimensionsExample() throws Exception {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Fixed-Length Key,Value Example.");
+ }
+
+ // Declare fields.
+ int fieldCount = 6;
+ ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+ typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+ typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+ typeTraits[2] = IntegerPointable.TYPE_TRAITS;
+ typeTraits[3] = IntegerPointable.TYPE_TRAITS;
+ typeTraits[4] = IntegerPointable.TYPE_TRAITS;
+ typeTraits[5] = IntegerPointable.TYPE_TRAITS;
+ // Declare field serdes.
+ ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE };
+
+ // Declare RTree keys.
+ int rtreeKeyFieldCount = 4;
+ IBinaryComparatorFactory[] rtreeCmpFactories = new IBinaryComparatorFactory[rtreeKeyFieldCount];
+ rtreeCmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+ rtreeCmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+ rtreeCmpFactories[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+ rtreeCmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+ // Declare BTree keys; these are only used by the LSMRTree.
+ int btreeKeyFieldCount = 6;
+ IBinaryComparatorFactory[] btreeCmpFactories = new IBinaryComparatorFactory[btreeKeyFieldCount];
+ btreeCmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+ btreeCmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+ btreeCmpFactories[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+ btreeCmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+ btreeCmpFactories[4] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+ btreeCmpFactories[5] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+ // create value providers
+ IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+ rtreeCmpFactories.length, IntegerPointable.FACTORY);
+
+ int indexFileId = getIndexFileId();
+ ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories);
+ treeIndex.create(indexFileId);
+ treeIndex.open(indexFileId);
+
+ long start = System.currentTimeMillis();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Inserting into tree...");
+ }
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+ ArrayTupleReference tuple = new ArrayTupleReference();
+ ITreeIndexAccessor indexAccessor = treeIndex.createAccessor();
+ int numInserts = 10000;
+ for (int i = 0; i < numInserts; i++) {
+ int p1x = rnd.nextInt();
+ int p1y = rnd.nextInt();
+ int p2x = rnd.nextInt();
+ int p2y = rnd.nextInt();
+
+ int pk1 = 5;
+ int pk2 = 10;
+
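+ // Normalize the two random points into an MBR (min corner, then max corner), followed by the two payload fields.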
+ TupleUtils.createIntegerTuple(tb, tuple, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
+ Math.max(p1y, p2y), pk1, pk2);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ if (i % 1000 == 0) {
+ LOGGER.info("Inserting " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " "
+ + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y) + ", " + pk1 + ", " + pk2);
+ }
+ }
+ try {
+ indexAccessor.insert(tuple);
+ } catch (TreeIndexException e) {
+ }
+ }
+ long end = System.currentTimeMillis();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
+ }
+
+ scan(indexAccessor, fieldSerdes);
+ diskOrderScan(indexAccessor, fieldSerdes);
+
+ // Build key.
+ ArrayTupleBuilder keyTb = new ArrayTupleBuilder(rtreeKeyFieldCount);
+ ArrayTupleReference key = new ArrayTupleReference();
+ TupleUtils.createIntegerTuple(keyTb, key, -1000, -1000, 1000, 1000);
+
+ rangeSearch(rtreeCmpFactories, indexAccessor, fieldSerdes, key);
+
+ treeIndex.close();
+ }
+
+ /**
+ * Three Dimensions Example.
+ *
+ * Create an RTree index of three dimensions, where the keys are of type
+ * double, and the payload is one double value. Fill the index with random
+ * values using insertions (not bulk load). Perform scans and a range search.
+ */
+ @Test
+ public void threeDimensionsExample() throws Exception {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Fixed-Length Key,Value Example.");
+ }
+
+ // Declare fields.
+ int fieldCount = 7;
+ ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+ typeTraits[0] = DoublePointable.TYPE_TRAITS;
+ typeTraits[1] = DoublePointable.TYPE_TRAITS;
+ typeTraits[2] = DoublePointable.TYPE_TRAITS;
+ typeTraits[3] = DoublePointable.TYPE_TRAITS;
+ typeTraits[4] = DoublePointable.TYPE_TRAITS;
+ typeTraits[5] = DoublePointable.TYPE_TRAITS;
+ typeTraits[6] = DoublePointable.TYPE_TRAITS;
+ // Declare field serdes.
+ ISerializerDeserializer[] fieldSerdes = { DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
+
+ // Declare RTree keys.
+ int rtreeKeyFieldCount = 6;
+ IBinaryComparatorFactory[] rtreeCmpFactories = new IBinaryComparatorFactory[rtreeKeyFieldCount];
+ rtreeCmpFactories[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+ rtreeCmpFactories[1] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+ rtreeCmpFactories[2] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+ rtreeCmpFactories[3] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+ rtreeCmpFactories[4] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+ rtreeCmpFactories[5] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+
+ // Declare BTree keys.
+ int btreeKeyFieldCount = 7;
+ IBinaryComparatorFactory[] btreeCmpFactories = new IBinaryComparatorFactory[btreeKeyFieldCount];
+ btreeCmpFactories[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+ btreeCmpFactories[1] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+ btreeCmpFactories[2] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+ btreeCmpFactories[3] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+ btreeCmpFactories[4] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+ btreeCmpFactories[5] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+ btreeCmpFactories[6] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY);
+
+ // create value providers
+ IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+ rtreeCmpFactories.length, DoublePointable.FACTORY);
+
+ int indexFileId = getIndexFileId();
+ ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories);
+ treeIndex.create(indexFileId);
+ treeIndex.open(indexFileId);
+
+ long start = System.currentTimeMillis();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Inserting into tree...");
+ }
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+ ArrayTupleReference tuple = new ArrayTupleReference();
+ ITreeIndexAccessor indexAccessor = treeIndex.createAccessor();
+ int numInserts = 10000;
+ for (int i = 0; i < numInserts; i++) {
+ double p1x = rnd.nextDouble();
+ double p1y = rnd.nextDouble();
+ double p1z = rnd.nextDouble();
+ double p2x = rnd.nextDouble();
+ double p2y = rnd.nextDouble();
+ double p2z = rnd.nextDouble();
+
+ double pk = 5.0;
+
+ TupleUtils.createDoubleTuple(tb, tuple, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.min(p1z, p2z),
+ Math.max(p1x, p2x), Math.max(p1y, p2y), Math.max(p1z, p2z), pk);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ if (i % 1000 == 0) {
+ LOGGER.info("Inserting " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " "
+ + Math.min(p1z, p2z) + " " + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y) + " "
+ + Math.max(p1z, p2z) + ", " + pk);
+ }
+ }
+ try {
+ indexAccessor.insert(tuple);
+ } catch (TreeIndexException e) {
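+ // Ignore failed insertions, e.g., duplicate keys.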
+ }
+ }
+ long end = System.currentTimeMillis();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
+ }
+
+ scan(indexAccessor, fieldSerdes);
+ diskOrderScan(indexAccessor, fieldSerdes);
+
+ // Build key.
+ ArrayTupleBuilder keyTb = new ArrayTupleBuilder(rtreeKeyFieldCount);
+ ArrayTupleReference key = new ArrayTupleReference();
+ TupleUtils.createDoubleTuple(keyTb, key, -1000.0, -1000.0, -1000.0, 1000.0, 1000.0, 1000.0);
+
+ rangeSearch(rtreeCmpFactories, indexAccessor, fieldSerdes, key);
+
+ treeIndex.close();
+ }
+
+ /**
+ * Deletion Example.
+ *
+ * Create an RTree index of two dimensions, where the keys are of type
+ * integer, and the payload is one integer value. Fill the index with random
+ * values using insertions, then delete the entries one by one. Repeat the
+ * procedure a few times on the same RTree.
+ */
+ @Test
+ public void deleteExample() throws Exception {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Deletion Example");
+ }
+
+ // Declare fields.
+ int fieldCount = 5;
+ ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+ typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+ typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+ typeTraits[2] = IntegerPointable.TYPE_TRAITS;
+ typeTraits[3] = IntegerPointable.TYPE_TRAITS;
+ typeTraits[4] = IntegerPointable.TYPE_TRAITS;
+
+ // Declare RTree keys.
+ int rtreeKeyFieldCount = 4;
+ IBinaryComparatorFactory[] rtreeCmpFactories = new IBinaryComparatorFactory[rtreeKeyFieldCount];
+ rtreeCmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+ rtreeCmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+ rtreeCmpFactories[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+ rtreeCmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+ // Declare BTree keys.
+ int btreeKeyFieldCount = 5;
+ IBinaryComparatorFactory[] btreeCmpFactories = new IBinaryComparatorFactory[btreeKeyFieldCount];
+ btreeCmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+ btreeCmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+ btreeCmpFactories[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+ btreeCmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+ btreeCmpFactories[4] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+ // create value providers
+ IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+ rtreeCmpFactories.length, IntegerPointable.FACTORY);
+
+ int indexFileId = getIndexFileId();
+ ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories);
+ treeIndex.create(indexFileId);
+ treeIndex.open(indexFileId);
+
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+ ArrayTupleReference tuple = new ArrayTupleReference();
+ ITreeIndexAccessor indexAccessor = treeIndex.createAccessor();
+
+ int runs = 3;
+ for (int run = 0; run < runs; run++) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Deletion example run: " + (run + 1) + "/" + runs);
+ LOGGER.info("Inserting into tree...");
+ }
+
+ int numInserts = 10000;
+ int[] p1xs = new int[numInserts];
+ int[] p1ys = new int[numInserts];
+ int[] p2xs = new int[numInserts];
+ int[] p2ys = new int[numInserts];
+ int[] pks = new int[numInserts];
+ int insDone = 0;
+
+ int[] insDoneCmp = new int[numInserts];
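+ // insDoneCmp[i] records how many inserts have succeeded up to and
+ // including tuple i; the delete loop below uses it to verify that
+ // deletions stay consistent with the preceding insertions.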
+ for (int i = 0; i < numInserts; i++) {
+ int p1x = rnd.nextInt();
+ int p1y = rnd.nextInt();
+ int p2x = rnd.nextInt();
+ int p2y = rnd.nextInt();
+ int pk = 5;
+
+ p1xs[i] = Math.min(p1x, p2x);
+ p1ys[i] = Math.min(p1y, p2y);
+ p2xs[i] = Math.max(p1x, p2x);
+ p2ys[i] = Math.max(p1y, p2y);
+ pks[i] = pk;
+
+ TupleUtils.createIntegerTuple(tb, tuple, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
+ Math.max(p1y, p2y), pk);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ if (i % 1000 == 0) {
+ LOGGER.info("Inserting " + i);
+ }
+ }
+ try {
+ indexAccessor.insert(tuple);
+ insDone++;
+ } catch (TreeIndexException e) {
+ // Ignore failed insertions, e.g., duplicate keys.
+ }
+ insDoneCmp[i] = insDone;
+ }
+
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Deleting from tree...");
+ }
+ int delDone = 0;
+ for (int i = 0; i < numInserts; i++) {
+ TupleUtils.createIntegerTuple(tb, tuple, p1xs[i], p1ys[i], p2xs[i], p2ys[i], pks[i]);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ if (i % 1000 == 0) {
+ LOGGER.info("Deleting " + i);
+ }
+ }
+ try {
+ indexAccessor.delete(tuple);
+ delDone++;
+ } catch (TreeIndexException e) {
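+ // Ignore failed deletions; delDone counts only successful ones.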
+ }
+ if (insDoneCmp[i] != delDone) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("INCONSISTENT STATE, ERROR IN DELETION EXAMPLE.");
+ LOGGER.info("INSDONECMP: " + insDoneCmp[i] + " " + delDone);
+ }
+ break;
+ }
+ }
+ if (insDone != delDone) {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("ERROR! INSDONE: " + insDone + " DELDONE: " + delDone);
+ }
+ break;
+ }
+ }
+ treeIndex.close();
+ }
+
+ /**
+ * Bulk load example.
+ *
+ * Load a tree with 10,000 tuples.
+ *
+ */
+ @Test
+ public void bulkLoadExample() throws Exception {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Bulk load example");
+ }
+ // Declare fields.
+ int fieldCount = 5;
+ ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+ typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+ typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+ typeTraits[2] = IntegerPointable.TYPE_TRAITS;
+ typeTraits[3] = IntegerPointable.TYPE_TRAITS;
+ typeTraits[4] = IntegerPointable.TYPE_TRAITS;
+
+ // Declare field serdes.
+ ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+
+ // Declare RTree keys.
+ int rtreeKeyFieldCount = 4;
+ IBinaryComparatorFactory[] rtreeCmpFactories = new IBinaryComparatorFactory[rtreeKeyFieldCount];
+ rtreeCmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+ rtreeCmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+ rtreeCmpFactories[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+ rtreeCmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+ // Declare BTree keys.
+ int btreeKeyFieldCount = 5;
+ IBinaryComparatorFactory[] btreeCmpFactories = new IBinaryComparatorFactory[btreeKeyFieldCount];
+ btreeCmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+ btreeCmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+ btreeCmpFactories[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+ btreeCmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+ btreeCmpFactories[4] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+ // create value providers
+ IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+ rtreeCmpFactories.length, IntegerPointable.FACTORY);
+
+ int indexFileId = getIndexFileId();
+ ITreeIndex treeIndex = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories);
+ treeIndex.create(indexFileId);
+ treeIndex.open(indexFileId);
+
+ // Load records.
+ int numInserts = 10000;
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Bulk loading " + numInserts + " tuples");
+ }
+ long start = System.currentTimeMillis();
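+ // 0.7f is the fill factor used while bulk loading pages.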
+ IIndexBulkLoadContext bulkLoadCtx = treeIndex.beginBulkLoad(0.7f);
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+ ArrayTupleReference tuple = new ArrayTupleReference();
+
+ for (int i = 0; i < numInserts; i++) {
+ int p1x = rnd.nextInt();
+ int p1y = rnd.nextInt();
+ int p2x = rnd.nextInt();
+ int p2y = rnd.nextInt();
+
+ int pk = 5;
+
+ TupleUtils.createIntegerTuple(tb, tuple, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
+ Math.max(p1y, p2y), pk);
+ treeIndex.bulkLoadAddTuple(tuple, bulkLoadCtx);
+ }
+
+ treeIndex.endBulkLoad(bulkLoadCtx);
+ long end = System.currentTimeMillis();
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info(numInserts + " tuples loaded in " + (end - start) + "ms");
+ }
+
+ ITreeIndexAccessor indexAccessor = treeIndex.createAccessor();
+
+ // Build key.
+ ArrayTupleBuilder keyTb = new ArrayTupleBuilder(rtreeKeyFieldCount);
+ ArrayTupleReference key = new ArrayTupleReference();
+ TupleUtils.createIntegerTuple(keyTb, key, -1000, -1000, 1000, 1000);
+
+ rangeSearch(rtreeCmpFactories, indexAccessor, fieldSerdes, key);
+
+ treeIndex.close();
+ }
+
+ private void scan(ITreeIndexAccessor indexAccessor, ISerializerDeserializer[] fieldSerdes) throws Exception {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Scan:");
+ }
+ ITreeIndexCursor scanCursor = indexAccessor.createSearchCursor();
+ SearchPredicate nullPred = new SearchPredicate(null, null);
+ indexAccessor.search(scanCursor, nullPred);
+ try {
+ while (scanCursor.hasNext()) {
+ scanCursor.next();
+ ITupleReference frameTuple = scanCursor.getTuple();
+ String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info(rec);
+ }
+ }
+ } finally {
+ scanCursor.close();
+ }
+ }
+
+ private void diskOrderScan(ITreeIndexAccessor indexAccessor, ISerializerDeserializer[] fieldSerdes)
+ throws Exception {
+ try {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Disk-Order Scan:");
+ }
+ TreeDiskOrderScanCursor diskOrderCursor = (TreeDiskOrderScanCursor) indexAccessor
+ .createDiskOrderScanCursor();
+ indexAccessor.diskOrderScan(diskOrderCursor);
+ try {
+ while (diskOrderCursor.hasNext()) {
+ diskOrderCursor.next();
+ ITupleReference frameTuple = diskOrderCursor.getTuple();
+ String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info(rec);
+ }
+ }
+ } finally {
+ diskOrderCursor.close();
+ }
+ } catch (UnsupportedOperationException e) {
+ // Ignore exception because some indexes, e.g. the LSMRTree, don't
+ // support disk-order scan.
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Ignoring disk-order scan since it's not supported.");
+ }
+ }
+ }
+
+ private void rangeSearch(IBinaryComparatorFactory[] cmpFactories, ITreeIndexAccessor indexAccessor,
+ ISerializerDeserializer[] fieldSerdes, ITupleReference key) throws Exception {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ String kString = TupleUtils.printTuple(key, fieldSerdes);
+ LOGGER.info("Range-Search using key: " + kString);
+ }
+ ITreeIndexCursor rangeCursor = indexAccessor.createSearchCursor();
+ MultiComparator cmp = RTreeUtils.getSearchMultiComparator(cmpFactories, key);
+ SearchPredicate rangePred = new SearchPredicate(key, cmp);
+ indexAccessor.search(rangeCursor, rangePred);
+ try {
+ while (rangeCursor.hasNext()) {
+ rangeCursor.next();
+ ITupleReference frameTuple = rangeCursor.getTuple();
+ String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info(rec);
+ }
+ }
+ } finally {
+ rangeCursor.close();
+ }
+ }
+
+}
\ No newline at end of file
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeInsertTest.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeInsertTest.java
new file mode 100644
index 0000000..cdd6ee0
--- /dev/null
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeInsertTest.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+
+/**
+ * Tests the RTree insert operation with integer and double fields using various
+ * numbers of dimensions and payload fields.
+ *
+ * Each test first fills an RTree with randomly generated tuples. We compare
+ * the results of the following operations against the expected results:
+ * 1. RTree scan. 2. Disk-order scan. 3. Range search.
+ *
+ */
+@SuppressWarnings("rawtypes")
+public abstract class AbstractRTreeInsertTest extends AbstractRTreeTestDriver {
+
+ private final RTreeTestUtils rTreeTestUtils;
+
+ public AbstractRTreeInsertTest() {
+ this.rTreeTestUtils = new RTreeTestUtils();
+ }
+
+ @Override
+ protected void runTest(ISerializerDeserializer[] fieldSerdes,
+ IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, ITupleReference key) throws Exception {
+ AbstractRTreeTestContext ctx = createTestContext(fieldSerdes, valueProviderFactories, numKeys);
+ // We assume all fieldSerdes are of the same type. Check the first one
+ // to determine which field types to generate.
+ if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
+ rTreeTestUtils.insertIntTuples(ctx, numTuplesToInsert, getRandom());
+ } else if (fieldSerdes[0] instanceof DoubleSerializerDeserializer) {
+ rTreeTestUtils.insertDoubleTuples(ctx, numTuplesToInsert, getRandom());
+ }
+
+ rTreeTestUtils.checkScan(ctx);
+ rTreeTestUtils.checkDiskOrderScan(ctx);
+ rTreeTestUtils.checkRangeSearch(ctx, key);
+ ctx.getIndex().close();
+ }
+
+ @Override
+ protected String getTestOpName() {
+ return "Insert";
+ }
+}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeMultiThreadTest.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeMultiThreadTest.java
new file mode 100644
index 0000000..ddb8a73
--- /dev/null
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeMultiThreadTest.java
@@ -0,0 +1,152 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import java.util.ArrayList;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.SerdeUtils;
+import edu.uci.ics.hyracks.storage.am.common.ITreeIndexTestWorkerFactory;
+import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
+import edu.uci.ics.hyracks.storage.am.common.TestWorkloadConf;
+import edu.uci.ics.hyracks.storage.am.common.TreeIndexMultiThreadTestDriver;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
+
+@SuppressWarnings("rawtypes")
+public abstract class AbstractRTreeMultiThreadTest {
+
+ protected final Logger LOGGER = Logger.getLogger(AbstractRTreeMultiThreadTest.class.getName());
+
+ // Machine-specific number of threads to use for testing.
+ protected final int REGULAR_NUM_THREADS = Runtime.getRuntime().availableProcessors();
+ // Excessive number of threads for testing.
+ protected final int EXCESSIVE_NUM_THREADS = Runtime.getRuntime().availableProcessors() * 4;
+ protected final int NUM_OPERATIONS = 10000;
+
+ protected ArrayList<TestWorkloadConf> workloadConfs = getTestWorkloadConf();
+
+ protected abstract void setUp() throws HyracksException;
+
+ protected abstract void tearDown() throws HyracksDataException;
+
+ protected abstract ITreeIndex createTreeIndex(ITypeTraits[] typeTraits,
+ IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
+ IPrimitiveValueProviderFactory[] valueProviderFactories) throws TreeIndexException;
+
+ protected abstract int getFileId();
+
+ protected abstract ITreeIndexTestWorkerFactory getWorkerFactory();
+
+ protected abstract ArrayList<TestWorkloadConf> getTestWorkloadConf();
+
+ protected abstract String getIndexTypeName();
+
+ protected static float[] getUniformOpProbs(TestOperation[] ops) {
+ float[] opProbs = new float[ops.length];
+ for (int i = 0; i < ops.length; i++) {
+ opProbs[i] = 1.0f / (float) ops.length;
+ }
+ return opProbs;
+ }
+
+ protected void runTest(ISerializerDeserializer[] fieldSerdes,
+ IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, int numThreads,
+ TestWorkloadConf conf, String dataMsg) throws HyracksException, InterruptedException, TreeIndexException {
+ setUp();
+
+ if (LOGGER.isLoggable(Level.INFO)) {
+ String indexTypeName = getIndexTypeName();
+ LOGGER.info(indexTypeName + " MultiThread Test:\nData: " + dataMsg + "; Threads: " + numThreads
+ + "; Workload: " + conf.toString() + ".");
+ }
+
+ ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes);
+ IBinaryComparatorFactory[] rtreeCmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes, numKeys);
+ IBinaryComparatorFactory[] btreeCmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes,
+ fieldSerdes.length);
+
+ ITreeIndex index = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories);
+ ITreeIndexTestWorkerFactory workerFactory = getWorkerFactory();
+
+ // 4 batches per thread.
+ int batchSize = (NUM_OPERATIONS / numThreads) / 4;
+
+ TreeIndexMultiThreadTestDriver driver = new TreeIndexMultiThreadTestDriver(index, workerFactory, fieldSerdes,
+ conf.ops, conf.opProbs);
+ driver.init(getFileId());
+ long[] times = driver.run(numThreads, 1, NUM_OPERATIONS, batchSize);
+ driver.deinit();
+
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("RTree MultiThread Test Time: " + times[0] + "ms");
+ }
+
+ tearDown();
+ }
+
+ @Test
+ public void twoDimensionsInt() throws InterruptedException, HyracksException, TreeIndexException {
+ ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+
+ int numKeys = 4;
+ IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+ numKeys, IntegerPointable.FACTORY);
+
+ String dataMsg = "Two Dimensions Of Integer Values";
+
+ for (TestWorkloadConf conf : workloadConfs) {
+ runTest(fieldSerdes, valueProviderFactories, numKeys, REGULAR_NUM_THREADS, conf, dataMsg);
+ runTest(fieldSerdes, valueProviderFactories, numKeys, EXCESSIVE_NUM_THREADS, conf, dataMsg);
+ }
+ }
+
+ //@Test
+ public void fourDimensionsDouble() throws InterruptedException, HyracksException, TreeIndexException {
+ ISerializerDeserializer[] fieldSerdes = { DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
+
+ int numKeys = 8;
+ IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+ numKeys, DoublePointable.FACTORY);
+
+ String dataMsg = "Four Dimensions Of Double Values";
+
+ for (TestWorkloadConf conf : workloadConfs) {
+ runTest(fieldSerdes, valueProviderFactories, numKeys, REGULAR_NUM_THREADS, conf, dataMsg);
+ runTest(fieldSerdes, valueProviderFactories, numKeys, EXCESSIVE_NUM_THREADS, conf, dataMsg);
+ }
+ }
+}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTestContext.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTestContext.java
new file mode 100644
index 0000000..9affc47
--- /dev/null
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTestContext.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import java.util.ArrayList;
+import java.util.Collection;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.common.TreeIndexTestContext;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+
+@SuppressWarnings("rawtypes")
+public abstract class AbstractRTreeTestContext extends TreeIndexTestContext<RTreeCheckTuple> {
+ private final ArrayList<RTreeCheckTuple> checkTuples = new ArrayList<RTreeCheckTuple>();
+
+ public AbstractRTreeTestContext(ISerializerDeserializer[] fieldSerdes, ITreeIndex treeIndex) {
+ super(fieldSerdes, treeIndex);
+ }
+
+ @Override
+ public Collection<RTreeCheckTuple> getCheckTuples() {
+ return checkTuples;
+ }
+
+}
\ No newline at end of file
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTestDriver.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTestDriver.java
new file mode 100644
index 0000000..18401b0
--- /dev/null
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTestDriver.java
@@ -0,0 +1,116 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import java.util.Random;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
+
+@SuppressWarnings("rawtypes")
+public abstract class AbstractRTreeTestDriver {
+ protected final Logger LOGGER = Logger.getLogger(AbstractRTreeTestDriver.class.getName());
+
+ protected static final int numTuplesToInsert = 10000;
+
+ protected abstract AbstractRTreeTestContext createTestContext(ISerializerDeserializer[] fieldSerdes,
+ IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys) throws Exception;
+
+ protected abstract Random getRandom();
+
+ protected abstract void runTest(ISerializerDeserializer[] fieldSerdes,
+ IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, ITupleReference key) throws Exception;
+
+ protected abstract String getTestOpName();
+
+ @Test
+ public void twoDimensionsInt() throws Exception {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("RTree " + getTestOpName() + " Test With Two Dimensions With Integer Keys.");
+ }
+
+ ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+
+ int numKeys = 4;
+ IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+ numKeys, IntegerPointable.FACTORY);
+ // Range search, the rectangle bottom left coordinates are -1000, -1000
+ // and the top right coordinates are 1000, 1000
+ ITupleReference key = TupleUtils.createIntegerTuple(-1000, -1000, 1000, 1000);
+
+ runTest(fieldSerdes, valueProviderFactories, numKeys, key);
+
+ }
+
+ @Test
+ public void twoDimensionsDouble() throws Exception {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("RTree " + getTestOpName() + " Test With Two Dimensions With Double Keys.");
+ }
+
+ ISerializerDeserializer[] fieldSerdes = { DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
+
+ int numKeys = 4;
+ IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+ numKeys, DoublePointable.FACTORY);
+ // Range search, the rectangle bottom left coordinates are -1000.0,
+ // -1000.0 and the top right coordinates are 1000.0, 1000.0
+ ITupleReference key = TupleUtils.createDoubleTuple(-1000.0, -1000.0, 1000.0, 1000.0);
+
+ runTest(fieldSerdes, valueProviderFactories, numKeys, key);
+
+ }
+
+ @Test
+ public void fourDimensionsDouble() throws Exception {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("RTree " + getTestOpName() + " Test With Four Dimensions With Double Keys.");
+ }
+
+ ISerializerDeserializer[] fieldSerdes = { DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
+
+ int numKeys = 8;
+ IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+ numKeys, DoublePointable.FACTORY);
+ // Range search, the rectangle bottom left coordinates are -1000.0,
+ // -1000.0, -1000.0, -1000.0 and the top right coordinates are 1000.0,
+ // 1000.0, 1000.0, 1000.0
+ ITupleReference key = TupleUtils.createDoubleTuple(-1000.0, -1000.0, -1000.0, -1000.0, 1000.0, 1000.0, 1000.0,
+ 1000.0);
+
+ runTest(fieldSerdes, valueProviderFactories, numKeys, key);
+
+ }
+}
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeCheckTuple.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeCheckTuple.java
new file mode 100644
index 0000000..98800e5
--- /dev/null
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeCheckTuple.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import edu.uci.ics.hyracks.storage.am.common.CheckTuple;
+
+@SuppressWarnings({ "rawtypes", "unchecked" })
+public class RTreeCheckTuple<T> extends CheckTuple {
+
+ public RTreeCheckTuple(int numFields, int numKeys) {
+ super(numFields, numKeys);
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ RTreeCheckTuple<T> other = (RTreeCheckTuple<T>) o;
+ for (int i = 0; i < tuple.length; i++) {
+ int cmp = tuple[i].compareTo(other.get(i));
+ if (cmp != 0) {
+ return false;
+ }
+ }
+ return true;
+ }
+
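+ // Returns true if this tuple's minimum bounding rectangle (MBR) overlaps
+ // the other tuple's MBR. The first numKeys/2 key fields hold the minimum
+ // coordinates and the remaining key fields hold the maximum coordinates.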
+ public boolean intersect(T o) {
+ RTreeCheckTuple<T> other = (RTreeCheckTuple<T>) o;
+ int maxFieldPos = numKeys / 2;
+ for (int i = 0; i < maxFieldPos; i++) {
+ int j = maxFieldPos + i;
+ int cmp = tuple[i].compareTo(other.get(j));
+ if (cmp > 0) {
+ return false;
+ }
+ cmp = tuple[j].compareTo(other.get(i));
+ if (cmp < 0) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+}
\ No newline at end of file
diff --git a/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTestUtils.java b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTestUtils.java
new file mode 100644
index 0000000..c966088
--- /dev/null
+++ b/hyracks-test-support/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTestUtils.java
@@ -0,0 +1,227 @@
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import static org.junit.Assert.fail;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.Random;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
+import edu.uci.ics.hyracks.storage.am.common.CheckTuple;
+import edu.uci.ics.hyracks.storage.am.common.ITreeIndexTestContext;
+import edu.uci.ics.hyracks.storage.am.common.TreeIndexTestUtils;
+import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.SearchPredicate;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
+
+@SuppressWarnings("rawtypes")
+public class RTreeTestUtils extends TreeIndexTestUtils {
+ private static final Logger LOGGER = Logger.getLogger(RTreeTestUtils.class.getName());
+
+ @SuppressWarnings("unchecked")
+ // Create a new ArrayList containing the elements satisfying the search key
+ public ArrayList<RTreeCheckTuple> getRangeSearchExpectedResults(ArrayList<RTreeCheckTuple> checkTuples,
+ RTreeCheckTuple key) {
+ ArrayList<RTreeCheckTuple> expectedResult = new ArrayList<RTreeCheckTuple>();
+ Iterator<RTreeCheckTuple> iter = checkTuples.iterator();
+ while (iter.hasNext()) {
+ RTreeCheckTuple t = iter.next();
+ if (t.intersect(key)) {
+ expectedResult.add(t);
+ }
+ }
+ return expectedResult;
+ }
+
+ public void checkRangeSearch(ITreeIndexTestContext ictx, ITupleReference key) throws Exception {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("Testing Range Search.");
+ }
+ AbstractRTreeTestContext ctx = (AbstractRTreeTestContext) ictx;
+ MultiComparator cmp = RTreeUtils.getSearchMultiComparator(ctx.getComparatorFactories(), key);
+
+ ITreeIndexCursor searchCursor = ctx.getIndexAccessor().createSearchCursor();
+ SearchPredicate searchPred = new SearchPredicate(key, cmp);
+ ctx.getIndexAccessor().search(searchCursor, searchPred);
+
+ // Get the subset of elements from the expected set within given key
+ // range.
+ RTreeCheckTuple keyCheck = (RTreeCheckTuple) createCheckTupleFromTuple(key, ctx.getFieldSerdes(),
+ cmp.getKeyFieldCount());
+
+ ArrayList<RTreeCheckTuple> expectedResult = null;
+
+ expectedResult = getRangeSearchExpectedResults((ArrayList<RTreeCheckTuple>) ctx.getCheckTuples(), keyCheck);
+ checkExpectedResults(searchCursor, expectedResult, ctx.getFieldSerdes(), ctx.getKeyFieldCount(), null);
+ }
+
+ @SuppressWarnings("unchecked")
+ public void insertDoubleTuples(ITreeIndexTestContext ctx, int numTuples, Random rnd) throws Exception {
+ int fieldCount = ctx.getFieldCount();
+ int numKeyFields = ctx.getKeyFieldCount();
+ double[] fieldValues = new double[ctx.getFieldCount()];
+ // Scale range of values according to number of keys.
+ // For example, for 2 keys we want the square root of numTuples, for 3
+ // keys the cube root of numTuples, etc.
+ double maxValue = Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
+ for (int i = 0; i < numTuples; i++) {
+ // Set keys.
+ setDoubleKeyFields(fieldValues, numKeyFields, maxValue, rnd);
+ // Set values.
+ for (int j = numKeyFields; j < fieldCount; j++) {
+ fieldValues[j] = j;
+ }
+ TupleUtils.createDoubleTuple(ctx.getTupleBuilder(), ctx.getTuple(), fieldValues);
+ if (LOGGER.isLoggable(Level.INFO)) {
+ if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
+ LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
+ }
+ }
+ try {
+ ctx.getIndexAccessor().insert(ctx.getTuple());
+ ctx.insertCheckTuple(createDoubleCheckTuple(fieldValues, ctx.getKeyFieldCount()), ctx.getCheckTuples());
+ } catch (TreeIndexException e) {
+ // We set expected values only after insertion succeeds because we
+ // ignore duplicate keys.
+ }
+ }
+ }
+
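+ // Generates a random rectangle: the first numKeyFields/2 fields receive the
+ // lower (min) coordinates and the corresponding upper fields receive values
+ // that are at least as large, so the generated MBR is always well-formed.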
+ protected void setDoubleKeyFields(double[] fieldValues, int numKeyFields, double maxValue, Random rnd) {
+ int maxFieldPos = numKeyFields / 2;
+ for (int j = 0; j < maxFieldPos; j++) {
+ int k = maxFieldPos + j;
+ double firstValue = rnd.nextDouble() % maxValue;
+ double secondValue;
+ do {
+ secondValue = rnd.nextDouble() % maxValue;
+ } while (secondValue < firstValue);
+ fieldValues[j] = firstValue;
+ fieldValues[k] = secondValue;
+ }
+ }
+
+ @SuppressWarnings("unchecked")
+ protected CheckTuple createDoubleCheckTuple(double[] fieldValues, int numKeyFields) {
+ RTreeCheckTuple<Double> checkTuple = new RTreeCheckTuple<Double>(fieldValues.length, numKeyFields);
+ for (double v : fieldValues) {
+ checkTuple.add(v);
+ }
+ return checkTuple;
+ }
+
+ @SuppressWarnings("unchecked")
+ public void bulkLoadDoubleTuples(ITreeIndexTestContext ctx, int numTuples, Random rnd) throws Exception {
+ int fieldCount = ctx.getFieldCount();
+ int numKeyFields = ctx.getKeyFieldCount();
+ double[] fieldValues = new double[ctx.getFieldCount()];
+ double maxValue = Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
+ Collection<CheckTuple> tmpCheckTuples = createCheckTuplesCollection();
+ for (int i = 0; i < numTuples; i++) {
+ // Set keys.
+ setDoubleKeyFields(fieldValues, numKeyFields, maxValue, rnd);
+ // Set values.
+ for (int j = numKeyFields; j < fieldCount; j++) {
+ fieldValues[j] = j;
+ }
+
+ // Set expected values.
+ ctx.insertCheckTuple(createDoubleCheckTuple(fieldValues, ctx.getKeyFieldCount()), tmpCheckTuples);
+ }
+ bulkLoadCheckTuples(ctx, tmpCheckTuples);
+
+ // Add tmpCheckTuples to ctx check tuples for comparing searches.
+ for (CheckTuple checkTuple : tmpCheckTuples) {
+ ctx.insertCheckTuple(checkTuple, ctx.getCheckTuples());
+ }
+ }
+
+ @Override
+ public void checkExpectedResults(ITreeIndexCursor cursor, Collection checkTuples,
+ ISerializerDeserializer[] fieldSerdes, int keyFieldCount, Iterator<CheckTuple> checkIter) throws Exception {
+ int actualCount = 0;
+ try {
+ while (cursor.hasNext()) {
+ cursor.next();
+ ITupleReference tuple = cursor.getTuple();
+ RTreeCheckTuple checkTuple = (RTreeCheckTuple) createCheckTupleFromTuple(tuple, fieldSerdes,
+ keyFieldCount);
+ if (!checkTuples.contains(checkTuple)) {
+ fail("Scan or range search returned unexpected answer: " + checkTuple.toString());
+ }
+ actualCount++;
+ }
+ if (actualCount < checkTuples.size()) {
+ fail("Scan or range search returned fewer answers than expected.\nExpected: " + checkTuples.size()
+ + "\nActual : " + actualCount);
+ }
+ if (actualCount > checkTuples.size()) {
+ fail("Scan or range search returned more answers than expected.\nExpected: " + checkTuples.size()
+ + "\nActual : " + actualCount);
+ }
+ } finally {
+ cursor.close();
+ }
+ }
+
+ @Override
+ protected CheckTuple createCheckTuple(int numFields, int numKeyFields) {
+ return new RTreeCheckTuple(numFields, numKeyFields);
+ }
+
+ @Override
+ protected ISearchPredicate createNullSearchPredicate() {
+ return new SearchPredicate(null, null);
+ }
+
+ @SuppressWarnings("unchecked")
+ @Override
+ protected CheckTuple createIntCheckTuple(int[] fieldValues, int numKeyFields) {
+ RTreeCheckTuple<Integer> checkTuple = new RTreeCheckTuple<Integer>(fieldValues.length, numKeyFields);
+ for (int v : fieldValues) {
+ checkTuple.add(v);
+ }
+ return checkTuple;
+ }
+
+ @Override
+ protected void setIntKeyFields(int[] fieldValues, int numKeyFields, int maxValue, Random rnd) {
+ int maxFieldPos = numKeyFields / 2;
+ for (int j = 0; j < maxFieldPos; j++) {
+ int k = maxFieldPos + j;
+ int firstValue = rnd.nextInt() % maxValue;
+ int secondValue;
+ do {
+ secondValue = rnd.nextInt() % maxValue;
+ } while (secondValue < firstValue);
+ fieldValues[j] = firstValue;
+ fieldValues[k] = secondValue;
+ }
+ }
+
+ @Override
+ protected Collection createCheckTuplesCollection() {
+ return new ArrayList<RTreeCheckTuple>();
+ }
+
+ @Override
+ protected ArrayTupleBuilder createDeleteTupleBuilder(ITreeIndexTestContext ctx) {
+ return new ArrayTupleBuilder(ctx.getFieldCount());
+ }
+
+ @Override
+ protected boolean checkDiskOrderScanResult(ITupleReference tuple, CheckTuple checkTuple, ITreeIndexTestContext ctx) throws HyracksDataException {
+ return ctx.getCheckTuples().contains(checkTuple);
+ }
+}